@posthog/ai 5.2.3 → 6.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/anthropic/index.cjs +37 -5
- package/dist/anthropic/index.cjs.map +1 -1
- package/dist/anthropic/index.mjs +37 -5
- package/dist/anthropic/index.mjs.map +1 -1
- package/dist/gemini/index.cjs +67 -25
- package/dist/gemini/index.cjs.map +1 -1
- package/dist/gemini/index.d.ts +0 -1
- package/dist/gemini/index.mjs +67 -25
- package/dist/gemini/index.mjs.map +1 -1
- package/dist/index.cjs +384 -165
- package/dist/index.cjs.map +1 -1
- package/dist/index.d.ts +3 -3
- package/dist/index.mjs +385 -166
- package/dist/index.mjs.map +1 -1
- package/dist/langchain/index.cjs +29 -9
- package/dist/langchain/index.cjs.map +1 -1
- package/dist/langchain/index.d.ts +1 -0
- package/dist/langchain/index.mjs +29 -9
- package/dist/langchain/index.mjs.map +1 -1
- package/dist/openai/index.cjs +106 -5
- package/dist/openai/index.cjs.map +1 -1
- package/dist/openai/index.mjs +106 -5
- package/dist/openai/index.mjs.map +1 -1
- package/dist/vercel/index.cjs +173 -122
- package/dist/vercel/index.cjs.map +1 -1
- package/dist/vercel/index.d.ts +2 -2
- package/dist/vercel/index.mjs +174 -123
- package/dist/vercel/index.mjs.map +1 -1
- package/package.json +4 -3
package/dist/vercel/index.mjs
CHANGED
@@ -1,4 +1,4 @@
-import {
+import { wrapLanguageModel } from 'ai';
 import { v4 } from 'uuid';
 import { Buffer } from 'buffer';
 
@@ -34,6 +34,20 @@ const truncate = str => {
     return str;
   }
 };
+
+/**
+ * Extract available tool calls from the request parameters.
+ * These are the tools provided to the LLM, not the tool calls in the response.
+ */
+const extractAvailableToolCalls = (provider, params) => {
+  {
+    // Vercel AI SDK stores tools in params.mode.tools when mode type is 'regular'
+    if (params.mode?.type === 'regular' && params.mode.tools) {
+      return params.mode.tools;
+    }
+    return null;
+  }
+};
 function sanitizeValues(obj) {
   if (obj === undefined || obj === null) {
     return obj;
@@ -141,7 +155,7 @@ const sendEventToPosthog = async ({
 const mapVercelParams = params => {
   return {
     temperature: params.temperature,
-
+    max_output_tokens: params.maxOutputTokens,
     top_p: params.topP,
     frequency_penalty: params.frequencyPenalty,
     presence_penalty: params.presencePenalty,
@@ -149,79 +163,68 @@ const mapVercelParams = params => {
     stream: params.stream
   };
 };
-const mapVercelPrompt =
-  // normalize single inputs into an array of messages
-  let promptsArray;
-  if (typeof prompt === 'string') {
-    promptsArray = [{
-      role: 'user',
-      content: prompt
-    }];
-  } else if (!Array.isArray(prompt)) {
-    promptsArray = [prompt];
-  } else {
-    promptsArray = prompt;
-  }
-
+const mapVercelPrompt = messages => {
   // Map and truncate individual content
-  const inputs =
-    let content
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-      content: {
+  const inputs = messages.map(message => {
+    let content;
+
+    // Handle system role which has string content
+    if (message.role === 'system') {
+      content = [{
+        type: 'text',
+        text: truncate(String(message.content))
+      }];
+    } else {
+      // Handle other roles which have array content
+      if (Array.isArray(message.content)) {
+        content = message.content.map(c => {
+          if (c.type === 'text') {
+            return {
+              type: 'text',
+              text: truncate(c.text)
+            };
+          } else if (c.type === 'file') {
+            return {
+              type: 'file',
              file: c.data instanceof URL ? c.data.toString() : 'raw files not supported',
-
-  }
-  }
-
-
-
-
+              mediaType: c.mediaType
+            };
+          } else if (c.type === 'reasoning') {
+            return {
+              type: 'reasoning',
+              text: truncate(c.reasoning)
+            };
+          } else if (c.type === 'tool-call') {
+            return {
+              type: 'tool-call',
              toolCallId: c.toolCallId,
              toolName: c.toolName,
-
-  }
-  }
-
-
-      type: 'tool-result',
-      content: {
+              input: c.input
+            };
+          } else if (c.type === 'tool-result') {
+            return {
+              type: 'tool-result',
              toolCallId: c.toolCallId,
              toolName: c.toolName,
-
+              output: c.output,
              isError: c.isError
-  }
+            };
+          }
+          return {
+            type: 'text',
+            text: ''
          };
-  }
-
-
-
-
-
-
-
-      text: truncate(p.content)
-      };
+        });
+      } else {
+        // Fallback for non-array content
+        content = [{
+          type: 'text',
+          text: truncate(String(message.content))
+        }];
+      }
    }
    return {
-      role:
+      role: message.role,
      content
    };
  });
@@ -253,52 +256,75 @@ const mapVercelPrompt = prompt => {
   return inputs;
 };
 const mapVercelOutput = result => {
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-  }
-
-
-
-
-
-  }
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+  const content = result.map(item => {
+    if (item.type === 'text') {
+      return {
+        type: 'text',
+        text: truncate(item.text)
+      };
+    }
+    if (item.type === 'tool-call') {
+      return {
+        type: 'tool-call',
+        id: item.toolCallId,
+        function: {
+          name: item.toolName,
+          arguments: item.args || JSON.stringify(item.arguments || {})
+        }
+      };
+    }
+    if (item.type === 'reasoning') {
+      return {
+        type: 'reasoning',
+        text: truncate(item.text)
+      };
+    }
+    if (item.type === 'file') {
+      // Handle files similar to input mapping - avoid large base64 data
+      let fileData;
+      if (item.data instanceof URL) {
+        fileData = item.data.toString();
+      } else if (typeof item.data === 'string') {
+        // Check if it's base64 data and potentially large
+        if (item.data.startsWith('data:') || item.data.length > 1000) {
+          fileData = `[${item.mediaType} file - ${item.data.length} bytes]`;
+        } else {
+          fileData = item.data;
+        }
+      } else {
+        fileData = `[binary ${item.mediaType} file]`;
+      }
+      return {
+        type: 'file',
+        name: 'generated_file',
+        mediaType: item.mediaType,
+        data: fileData
+      };
+    }
+    if (item.type === 'source') {
+      return {
+        type: 'source',
+        sourceType: item.sourceType,
+        id: item.id,
+        url: item.url || '',
+        title: item.title || ''
+      };
+    }
+    // Fallback for unknown types - try to extract text if possible
+    return {
+      type: 'text',
+      text: truncate(JSON.stringify(item))
+    };
+  });
+  if (content.length > 0) {
    return [{
-
-
+      role: 'assistant',
+      content: content.length === 1 && content[0].type === 'text' ? content[0].text : content
    }];
  }
  // otherwise stringify and truncate
  try {
-    const jsonOutput = JSON.stringify(
+    const jsonOutput = JSON.stringify(result);
    return [{
      content: truncate(jsonOutput),
      role: 'assistant'
@@ -324,14 +350,14 @@ const createInstrumentationMiddleware = (phClient, model, options) => {
         ...options,
         ...mapVercelParams(params)
       };
+      const availableTools = extractAvailableToolCalls('vercel', params);
       try {
         const result = await doGenerate();
-        const latency = (Date.now() - startTime) / 1000;
         const modelId = options.posthogModelOverride ?? (result.response?.modelId ? result.response.modelId : model.modelId);
         const provider = options.posthogProviderOverride ?? extractProvider(model);
         const baseURL = ''; // cannot currently get baseURL from vercel
-        const content = mapVercelOutput(result);
-
+        const content = mapVercelOutput(result.content);
+        const latency = (Date.now() - startTime) / 1000;
         const providerMetadata = result.providerMetadata;
         const additionalTokenValues = {
           ...(providerMetadata?.openai?.reasoningTokens ? {
@@ -352,19 +378,17 @@ const createInstrumentationMiddleware = (phClient, model, options) => {
           model: modelId,
           provider: provider,
           input: options.posthogPrivacyMode ? '' : mapVercelPrompt(params.prompt),
-          output:
-            content,
-            role: 'assistant'
-          }],
+          output: content,
           latency,
           baseURL,
           params: mergedParams,
           httpStatus: 200,
           usage: {
-            inputTokens: result.usage.
-            outputTokens: result.usage.
+            inputTokens: result.usage.inputTokens,
+            outputTokens: result.usage.outputTokens,
             ...additionalTokenValues
           },
+          tools: availableTools,
           captureImmediate: options.posthogCaptureImmediate
         });
         return result;
@@ -388,6 +412,7 @@ const createInstrumentationMiddleware = (phClient, model, options) => {
           },
           isError: true,
           error: truncate(JSON.stringify(error)),
+          tools: availableTools,
           captureImmediate: options.posthogCaptureImmediate
         });
         throw error;
@@ -399,6 +424,7 @@ const createInstrumentationMiddleware = (phClient, model, options) => {
     }) => {
       const startTime = Date.now();
       let generatedText = '';
+      let reasoningText = '';
       let usage = {};
       const mergedParams = {
         ...options,
@@ -406,7 +432,9 @@ const createInstrumentationMiddleware = (phClient, model, options) => {
       };
       const modelId = options.posthogModelOverride ?? model.modelId;
       const provider = options.posthogProviderOverride ?? extractProvider(model);
+      const availableTools = extractAvailableToolCalls('vercel', params);
       const baseURL = ''; // cannot currently get baseURL from vercel
+
       try {
         const {
           stream,
@@ -414,13 +442,17 @@ const createInstrumentationMiddleware = (phClient, model, options) => {
         } = await doStream();
         const transformStream = new TransformStream({
           transform(chunk, controller) {
+            // Handle new v5 streaming patterns
             if (chunk.type === 'text-delta') {
-              generatedText += chunk.
+              generatedText += chunk.delta;
+            }
+            if (chunk.type === 'reasoning-delta') {
+              reasoningText += chunk.delta; // New in v5
             }
             if (chunk.type === 'finish') {
               usage = {
-                inputTokens: chunk.usage?.
-                outputTokens: chunk.usage?.
+                inputTokens: chunk.usage?.inputTokens,
+                outputTokens: chunk.usage?.outputTokens
               };
               if (chunk.providerMetadata?.openai?.reasoningTokens) {
                 usage.reasoningTokens = chunk.providerMetadata.openai.reasoningTokens;
@@ -439,6 +471,26 @@ const createInstrumentationMiddleware = (phClient, model, options) => {
           },
           flush: async () => {
             const latency = (Date.now() - startTime) / 1000;
+            // Build content array similar to mapVercelOutput structure
+            const content = [];
+            if (reasoningText) {
+              content.push({
+                type: 'reasoning',
+                text: truncate(reasoningText)
+              });
+            }
+            if (generatedText) {
+              content.push({
+                type: 'text',
+                text: truncate(generatedText)
+              });
+            }
+
+            // Structure output like mapVercelOutput does
+            const output = content.length > 0 ? [{
+              role: 'assistant',
+              content: content.length === 1 && content[0].type === 'text' ? content[0].text : content
+            }] : [];
             await sendEventToPosthog({
               client: phClient,
               distinctId: options.posthogDistinctId,
@@ -446,15 +498,13 @@ const createInstrumentationMiddleware = (phClient, model, options) => {
               model: modelId,
               provider: provider,
               input: options.posthogPrivacyMode ? '' : mapVercelPrompt(params.prompt),
-              output:
-                content: generatedText,
-                role: 'assistant'
-              }],
+              output: output,
               latency,
               baseURL,
               params: mergedParams,
               httpStatus: 200,
               usage,
+              tools: availableTools,
               captureImmediate: options.posthogCaptureImmediate
             });
           }
@@ -482,6 +532,7 @@ const createInstrumentationMiddleware = (phClient, model, options) => {
         },
         isError: true,
         error: truncate(JSON.stringify(error)),
+        tools: availableTools,
         captureImmediate: options.posthogCaptureImmediate
       });
       throw error;
@@ -497,7 +548,7 @@ const wrapVercelLanguageModel = (model, phClient, options) => {
     posthogTraceId: traceId,
     posthogDistinctId: options.posthogDistinctId
   });
-  const wrappedModel =
+  const wrappedModel = wrapLanguageModel({
     model,
     middleware
   });
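
For context, a minimal sketch of how the wrapped model produced by wrapVercelLanguageModel is typically consumed with the AI SDK v5 APIs this release targets (wrapLanguageModel, inputTokens/outputTokens usage fields, text-delta chunks carrying delta). The withTracing entry point, the posthog-node client setup, the model name, API key, host, and distinct ID below are assumptions for illustration and are not part of this diff.

// Illustrative usage sketch only; assumed names are marked in the comments.
import { PostHog } from 'posthog-node';
import { withTracing } from '@posthog/ai'; // assumed public wrapper over wrapVercelLanguageModel
import { openai } from '@ai-sdk/openai';
import { generateText } from 'ai';

const phClient = new PostHog('<ph_project_api_key>', { host: 'https://us.i.posthog.com' });

// Wrapping the model installs the instrumentation middleware shown above, which
// captures input/output messages, the available tools, token usage, and latency.
const model = withTracing(openai('gpt-4o-mini'), phClient, {
  posthogDistinctId: 'user_123', // forwarded to sendEventToPosthog as distinctId
  posthogPrivacyMode: false      // when true, the prompt input is redacted before capture
});

const { text, usage } = await generateText({
  model,
  prompt: 'Write a haiku about observability.'
});

console.log(text, usage.inputTokens, usage.outputTokens); // AI SDK v5 usage field names
await phClient.shutdown();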