@posthog/ai 5.2.3 → 6.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/anthropic/index.cjs +37 -5
- package/dist/anthropic/index.cjs.map +1 -1
- package/dist/anthropic/index.mjs +37 -5
- package/dist/anthropic/index.mjs.map +1 -1
- package/dist/gemini/index.cjs +67 -25
- package/dist/gemini/index.cjs.map +1 -1
- package/dist/gemini/index.d.ts +0 -1
- package/dist/gemini/index.mjs +67 -25
- package/dist/gemini/index.mjs.map +1 -1
- package/dist/index.cjs +384 -165
- package/dist/index.cjs.map +1 -1
- package/dist/index.d.ts +3 -3
- package/dist/index.mjs +385 -166
- package/dist/index.mjs.map +1 -1
- package/dist/langchain/index.cjs +29 -9
- package/dist/langchain/index.cjs.map +1 -1
- package/dist/langchain/index.d.ts +1 -0
- package/dist/langchain/index.mjs +29 -9
- package/dist/langchain/index.mjs.map +1 -1
- package/dist/openai/index.cjs +106 -5
- package/dist/openai/index.cjs.map +1 -1
- package/dist/openai/index.mjs +106 -5
- package/dist/openai/index.mjs.map +1 -1
- package/dist/vercel/index.cjs +173 -122
- package/dist/vercel/index.cjs.map +1 -1
- package/dist/vercel/index.d.ts +2 -2
- package/dist/vercel/index.mjs +174 -123
- package/dist/vercel/index.mjs.map +1 -1
- package/package.json +4 -3
package/dist/index.mjs
CHANGED
@@ -2,7 +2,7 @@ import { OpenAI, AzureOpenAI } from 'openai';
 import * as uuid from 'uuid';
 import { v4 } from 'uuid';
 import { Buffer } from 'buffer';
-import {
+import { wrapLanguageModel } from 'ai';
 import AnthropicOriginal from '@anthropic-ai/sdk';
 import { GoogleGenAI } from '@google/genai';
@@ -23,30 +23,171 @@ const getModelParams = params => {
   return modelParams;
 };
 const formatResponseAnthropic = response => {
-  // Example approach if "response.content" holds array of text segments, etc.
   const output = [];
+  const content = [];
   for (const choice of response.content ?? []) {
-    if (choice?.text) {
-      …
+    if (choice?.type === 'text' && choice?.text) {
+      content.push({
+        type: 'text',
+        text: choice.text
+      });
+    } else if (choice?.type === 'tool_use' && choice?.name && choice?.id) {
+      content.push({
+        type: 'function',
+        id: choice.id,
+        function: {
+          name: choice.name,
+          arguments: choice.input || {}
+        }
       });
     }
   }
+  if (content.length > 0) {
+    output.push({
+      role: 'assistant',
+      content
+    });
+  }
   return output;
 };
 const formatResponseOpenAI = response => {
   const output = [];
-  …
+  if (response.choices) {
+    for (const choice of response.choices) {
+      const content = [];
+      let role = 'assistant';
+      if (choice.message) {
+        if (choice.message.role) {
+          role = choice.message.role;
+        }
+        if (choice.message.content) {
+          content.push({
+            type: 'text',
+            text: choice.message.content
+          });
+        }
+        if (choice.message.tool_calls) {
+          for (const toolCall of choice.message.tool_calls) {
+            content.push({
+              type: 'function',
+              id: toolCall.id,
+              function: {
+                name: toolCall.function.name,
+                arguments: toolCall.function.arguments
+              }
+            });
+          }
+        }
+      }
+      if (content.length > 0) {
+        output.push({
+          role,
+          content
+        });
+      }
+    }
+  }
+  // Handle Responses API format
+  if (response.output) {
+    const content = [];
+    let role = 'assistant';
+    for (const item of response.output) {
+      if (item.type === 'message') {
+        role = item.role;
+        if (item.content && Array.isArray(item.content)) {
+          for (const contentItem of item.content) {
+            if (contentItem.type === 'output_text' && contentItem.text) {
+              content.push({
+                type: 'text',
+                text: contentItem.text
+              });
+            } else if (contentItem.text) {
+              content.push({
+                type: 'text',
+                text: contentItem.text
+              });
+            } else if (contentItem.type === 'input_image' && contentItem.image_url) {
+              content.push({
+                type: 'image',
+                image: contentItem.image_url
+              });
+            }
+          }
+        } else if (item.content) {
+          content.push({
+            type: 'text',
+            text: String(item.content)
+          });
+        }
+      } else if (item.type === 'function_call') {
+        content.push({
+          type: 'function',
+          id: item.call_id || item.id || '',
+          function: {
+            name: item.name,
+            arguments: item.arguments || {}
+          }
+        });
+      }
+    }
+    if (content.length > 0) {
       output.push({
-        role
-        content
+        role,
+        content
       });
     }
   }
   return output;
 };
+const formatResponseGemini = response => {
+  const output = [];
+  if (response.candidates && Array.isArray(response.candidates)) {
+    for (const candidate of response.candidates) {
+      if (candidate.content && candidate.content.parts) {
+        const content = [];
+        for (const part of candidate.content.parts) {
+          if (part.text) {
+            content.push({
+              type: 'text',
+              text: part.text
+            });
+          } else if (part.functionCall) {
+            content.push({
+              type: 'function',
+              function: {
+                name: part.functionCall.name,
+                arguments: part.functionCall.args
+              }
+            });
+          }
+        }
+        if (content.length > 0) {
+          output.push({
+            role: 'assistant',
+            content
+          });
+        }
+      } else if (candidate.text) {
+        output.push({
+          role: 'assistant',
+          content: [{
+            type: 'text',
+            text: candidate.text
+          }]
+        });
+      }
+    }
+  } else if (response.text) {
+    output.push({
+      role: 'assistant',
+      content: [{
+        type: 'text',
+        text: response.text
+      }]
+    });
+  }
+  return output;
+};
 const mergeSystemPrompt = (params, provider) => {
   {
     const messages = params.messages || [];
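The three formatResponse* helpers above normalize provider-specific responses into one assistant-message shape. A minimal sketch of the Anthropic case (the input object is hypothetical; the helpers are internal to the bundle, shapes taken from the diff):

// Hypothetical Anthropic Messages API response with text plus a tool call.
const anthropicResponse = {
  content: [
    { type: 'text', text: 'Checking the weather now.' },
    { type: 'tool_use', id: 'toolu_01', name: 'get_weather', input: { city: 'Berlin' } }
  ]
};
// formatResponseAnthropic(anthropicResponse) returns:
// [{
//   role: 'assistant',
//   content: [
//     { type: 'text', text: 'Checking the weather now.' },
//     { type: 'function', id: 'toolu_01', function: { name: 'get_weather', arguments: { city: 'Berlin' } } }
//   ]
// }]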
@@ -76,6 +217,35 @@ const truncate = str => {
     return str;
   }
 };
+/**
+ * Extract available tool calls from the request parameters.
+ * These are the tools provided to the LLM, not the tool calls in the response.
+ */
+const extractAvailableToolCalls = (provider, params) => {
+  if (provider === 'anthropic') {
+    if (params.tools) {
+      return params.tools;
+    }
+    return null;
+  } else if (provider === 'gemini') {
+    if (params.config && params.config.tools) {
+      return params.config.tools;
+    }
+    return null;
+  } else if (provider === 'openai') {
+    if (params.tools) {
+      return params.tools;
+    }
+    return null;
+  } else if (provider === 'vercel') {
+    // Vercel AI SDK stores tools in params.mode.tools when mode type is 'regular'
+    if (params.mode?.type === 'regular' && params.mode.tools) {
+      return params.mode.tools;
+    }
+    return null;
+  }
+  return null;
+};
 function sanitizeValues(obj) {
   if (obj === undefined || obj === null) {
     return obj;
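This helper's only job is to paper over where each SDK keeps its tool definitions; the result is passed to sendEventToPosthog as tools in the hunks that follow. Sketch (parameter objects are illustrative):

// extractAvailableToolCalls('openai',    { tools: [toolDef] })                       -> [toolDef]
// extractAvailableToolCalls('anthropic', { tools: [toolDef] })                       -> [toolDef]
// extractAvailableToolCalls('gemini',    { config: { tools: [toolDef] } })           -> [toolDef]
// extractAvailableToolCalls('vercel',    { mode: { type: 'regular', tools: defs } }) -> defs
// Any other provider, or params without tools                                        -> null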
@@ -245,6 +415,7 @@ let WrappedCompletions$1 = class WrappedCompletions extends Completions {
         }
       }
       const latency = (Date.now() - startTime) / 1000;
+      const availableTools = extractAvailableToolCalls('openai', openAIParams);
       await sendEventToPosthog({
         client: this.phClient,
         distinctId: posthogDistinctId,
@@ -261,6 +432,7 @@ let WrappedCompletions$1 = class WrappedCompletions extends Completions {
         params: body,
         httpStatus: 200,
         usage,
+        tools: availableTools,
         captureImmediate: posthogCaptureImmediate
       });
     } catch (error) {
@@ -295,6 +467,7 @@ let WrappedCompletions$1 = class WrappedCompletions extends Completions {
     const wrappedPromise = parentPromise.then(async result => {
       if ('choices' in result) {
         const latency = (Date.now() - startTime) / 1000;
+        const availableTools = extractAvailableToolCalls('openai', openAIParams);
         await sendEventToPosthog({
           client: this.phClient,
           distinctId: posthogDistinctId,
@@ -313,6 +486,7 @@ let WrappedCompletions$1 = class WrappedCompletions extends Completions {
             reasoningTokens: result.usage?.completion_tokens_details?.reasoning_tokens ?? 0,
             cacheReadInputTokens: result.usage?.prompt_tokens_details?.cached_tokens ?? 0
           },
+          tools: availableTools,
           captureImmediate: posthogCaptureImmediate
         });
       }
@@ -389,6 +563,7 @@ let WrappedResponses$1 = class WrappedResponses extends Responses {
         }
       }
       const latency = (Date.now() - startTime) / 1000;
+      const availableTools = extractAvailableToolCalls('openai', openAIParams);
       await sendEventToPosthog({
         client: this.phClient,
         distinctId: posthogDistinctId,
@@ -403,6 +578,7 @@ let WrappedResponses$1 = class WrappedResponses extends Responses {
         params: body,
         httpStatus: 200,
         usage,
+        tools: availableTools,
         captureImmediate: posthogCaptureImmediate
       });
     } catch (error) {
@@ -437,6 +613,7 @@ let WrappedResponses$1 = class WrappedResponses extends Responses {
     const wrappedPromise = parentPromise.then(async result => {
       if ('output' in result) {
         const latency = (Date.now() - startTime) / 1000;
+        const availableTools = extractAvailableToolCalls('openai', openAIParams);
         await sendEventToPosthog({
           client: this.phClient,
           distinctId: posthogDistinctId,
@@ -445,7 +622,9 @@ let WrappedResponses$1 = class WrappedResponses extends Responses {
           model: openAIParams.model,
           provider: 'openai',
           input: openAIParams.input,
-          output:
+          output: formatResponseOpenAI({
+            output: result.output
+          }),
           latency,
           baseURL: this.baseURL ?? '',
           params: body,
@@ -456,6 +635,7 @@ let WrappedResponses$1 = class WrappedResponses extends Responses {
             reasoningTokens: result.usage?.output_tokens_details?.reasoning_tokens ?? 0,
             cacheReadInputTokens: result.usage?.input_tokens_details?.cached_tokens ?? 0
           },
+          tools: availableTools,
          captureImmediate: posthogCaptureImmediate
        });
      }
@@ -935,7 +1115,7 @@ class WrappedResponses extends AzureOpenAI.Responses {
 const mapVercelParams = params => {
   return {
     temperature: params.temperature,
-    …
+    max_output_tokens: params.maxOutputTokens,
     top_p: params.topP,
     frequency_penalty: params.frequencyPenalty,
     presence_penalty: params.presencePenalty,
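mapVercelParams renames the AI SDK's camelCase call options to the snake_case keys recorded on the event; maxOutputTokens is the v5 option handled above. Illustration (values are made up):

// mapVercelParams({ temperature: 0.2, maxOutputTokens: 512, topP: 0.9, stream: false })
// -> { temperature: 0.2, max_output_tokens: 512, top_p: 0.9, stream: false, ... }
// (options the caller did not set simply come through as undefined)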
@@ -943,78 +1123,67 @@ const mapVercelParams = params => {
     stream: params.stream
   };
 };
-const mapVercelPrompt =
-  // normalize single inputs into an array of messages
-  let promptsArray;
-  if (typeof prompt === 'string') {
-    promptsArray = [{
-      role: 'user',
-      content: prompt
-    }];
-  } else if (!Array.isArray(prompt)) {
-    promptsArray = [prompt];
-  } else {
-    promptsArray = prompt;
-  }
-  // Map and truncate individual content
-  const inputs =
-  let content
-  …
-      type: 'file',
-      content: {
-        file: c.data instanceof URL ? c.data.toString() : 'raw files not supported',
-  …
-        toolCallId: c.toolCallId,
-        toolName: c.toolName,
-  …
-      type: 'tool-result',
-      content: {
-        toolCallId: c.toolCallId,
-        toolName: c.toolName,
-        isError: c.isError
-  …
-        text: truncate(p.content)
-      };
-    }
-    return {
-      role:
-      content
-    };
-  });
+const mapVercelPrompt = messages => {
+  // Map and truncate individual content
+  const inputs = messages.map(message => {
+    let content;
+    // Handle system role which has string content
+    if (message.role === 'system') {
+      content = [{
+        type: 'text',
+        text: truncate(String(message.content))
+      }];
+    } else {
+      // Handle other roles which have array content
+      if (Array.isArray(message.content)) {
+        content = message.content.map(c => {
+          if (c.type === 'text') {
+            return {
+              type: 'text',
+              text: truncate(c.text)
+            };
+          } else if (c.type === 'file') {
+            return {
+              type: 'file',
+              file: c.data instanceof URL ? c.data.toString() : 'raw files not supported',
+              mediaType: c.mediaType
+            };
+          } else if (c.type === 'reasoning') {
+            return {
+              type: 'reasoning',
+              text: truncate(c.reasoning)
+            };
+          } else if (c.type === 'tool-call') {
+            return {
+              type: 'tool-call',
+              toolCallId: c.toolCallId,
+              toolName: c.toolName,
+              input: c.input
+            };
+          } else if (c.type === 'tool-result') {
+            return {
+              type: 'tool-result',
+              toolCallId: c.toolCallId,
+              toolName: c.toolName,
+              output: c.output,
+              isError: c.isError
+            };
+          }
+          return {
+            type: 'text',
+            text: ''
+          };
+        });
+      } else {
+        // Fallback for non-array content
+        content = [{
+          type: 'text',
+          text: truncate(String(message.content))
+        }];
+      }
+    }
+    return {
+      role: message.role,
+      content
+    };
+  });
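mapVercelPrompt now takes the AI SDK v5 message array directly instead of normalizing loose string prompts. A sketch of the normalization (messages are illustrative):

// Input: a v5 prompt with a string-content system message and an array-content user message.
const messages = [
  { role: 'system', content: 'Be terse.' },
  { role: 'user', content: [{ type: 'text', text: 'Hi there' }] }
];
// mapVercelPrompt(messages) returns:
// [
//   { role: 'system', content: [{ type: 'text', text: 'Be terse.' }] },
//   { role: 'user', content: [{ type: 'text', text: 'Hi there' }] }
// ]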
@@ -1046,52 +1215,75 @@ const mapVercelPrompt = prompt => {
   return inputs;
 };
 const mapVercelOutput = result => {
-  …
+  const content = result.map(item => {
+    if (item.type === 'text') {
+      return {
+        type: 'text',
+        text: truncate(item.text)
+      };
+    }
+    if (item.type === 'tool-call') {
+      return {
+        type: 'tool-call',
+        id: item.toolCallId,
+        function: {
+          name: item.toolName,
+          arguments: item.args || JSON.stringify(item.arguments || {})
+        }
+      };
+    }
+    if (item.type === 'reasoning') {
+      return {
+        type: 'reasoning',
+        text: truncate(item.text)
+      };
+    }
+    if (item.type === 'file') {
+      // Handle files similar to input mapping - avoid large base64 data
+      let fileData;
+      if (item.data instanceof URL) {
+        fileData = item.data.toString();
+      } else if (typeof item.data === 'string') {
+        // Check if it's base64 data and potentially large
+        if (item.data.startsWith('data:') || item.data.length > 1000) {
+          fileData = `[${item.mediaType} file - ${item.data.length} bytes]`;
+        } else {
+          fileData = item.data;
+        }
+      } else {
+        fileData = `[binary ${item.mediaType} file]`;
+      }
+      return {
+        type: 'file',
+        name: 'generated_file',
+        mediaType: item.mediaType,
+        data: fileData
+      };
+    }
+    if (item.type === 'source') {
+      return {
+        type: 'source',
+        sourceType: item.sourceType,
+        id: item.id,
+        url: item.url || '',
+        title: item.title || ''
+      };
+    }
+    // Fallback for unknown types - try to extract text if possible
+    return {
+      type: 'text',
+      text: truncate(JSON.stringify(item))
+    };
+  });
+  if (content.length > 0) {
     return [{
-      …
+      role: 'assistant',
+      content: content.length === 1 && content[0].type === 'text' ? content[0].text : content
     }];
   }
   // otherwise stringify and truncate
   try {
-    const jsonOutput = JSON.stringify(
+    const jsonOutput = JSON.stringify(result);
     return [{
       content: truncate(jsonOutput),
       role: 'assistant'
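Note the collapse rule at the end of mapVercelOutput: a lone text part is recorded as a plain string, anything richer stays a content array. Illustration (items follow the shapes handled above):

// mapVercelOutput([{ type: 'text', text: 'Hello' }])
// -> [{ role: 'assistant', content: 'Hello' }]
// mapVercelOutput([
//   { type: 'reasoning', text: 'consider the city first' },
//   { type: 'tool-call', toolCallId: 'c1', toolName: 'get_weather', args: '{"city":"Berlin"}' }
// ])
// -> [{ role: 'assistant', content: [
//      { type: 'reasoning', text: 'consider the city first' },
//      { type: 'tool-call', id: 'c1', function: { name: 'get_weather', arguments: '{"city":"Berlin"}' } }
//    ] }]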
@@ -1117,14 +1309,14 @@ const createInstrumentationMiddleware = (phClient, model, options) => {
       ...options,
       ...mapVercelParams(params)
     };
+    const availableTools = extractAvailableToolCalls('vercel', params);
     try {
       const result = await doGenerate();
-      const latency = (Date.now() - startTime) / 1000;
       const modelId = options.posthogModelOverride ?? (result.response?.modelId ? result.response.modelId : model.modelId);
       const provider = options.posthogProviderOverride ?? extractProvider(model);
       const baseURL = ''; // cannot currently get baseURL from vercel
-      const content = mapVercelOutput(result);
-      …
+      const content = mapVercelOutput(result.content);
+      const latency = (Date.now() - startTime) / 1000;
       const providerMetadata = result.providerMetadata;
       const additionalTokenValues = {
         ...(providerMetadata?.openai?.reasoningTokens ? {
@@ -1145,19 +1337,17 @@ const createInstrumentationMiddleware = (phClient, model, options) => {
         model: modelId,
         provider: provider,
         input: options.posthogPrivacyMode ? '' : mapVercelPrompt(params.prompt),
-        output:
-          content,
-          role: 'assistant'
-        }],
+        output: content,
         latency,
         baseURL,
         params: mergedParams,
         httpStatus: 200,
         usage: {
-          inputTokens: result.usage.
-          outputTokens: result.usage.
+          inputTokens: result.usage.inputTokens,
+          outputTokens: result.usage.outputTokens,
           ...additionalTokenValues
         },
+        tools: availableTools,
         captureImmediate: options.posthogCaptureImmediate
       });
       return result;
@@ -1181,6 +1371,7 @@ const createInstrumentationMiddleware = (phClient, model, options) => {
         },
         isError: true,
         error: truncate(JSON.stringify(error)),
+        tools: availableTools,
         captureImmediate: options.posthogCaptureImmediate
       });
       throw error;
@@ -1192,6 +1383,7 @@ const createInstrumentationMiddleware = (phClient, model, options) => {
   }) => {
     const startTime = Date.now();
     let generatedText = '';
+    let reasoningText = '';
     let usage = {};
     const mergedParams = {
       ...options,
@@ -1199,6 +1391,7 @@ const createInstrumentationMiddleware = (phClient, model, options) => {
     };
     const modelId = options.posthogModelOverride ?? model.modelId;
     const provider = options.posthogProviderOverride ?? extractProvider(model);
+    const availableTools = extractAvailableToolCalls('vercel', params);
     const baseURL = ''; // cannot currently get baseURL from vercel
     try {
       const {
@@ -1207,13 +1400,17 @@ const createInstrumentationMiddleware = (phClient, model, options) => {
       } = await doStream();
       const transformStream = new TransformStream({
         transform(chunk, controller) {
+          // Handle new v5 streaming patterns
           if (chunk.type === 'text-delta') {
-            generatedText += chunk.
+            generatedText += chunk.delta;
+          }
+          if (chunk.type === 'reasoning-delta') {
+            reasoningText += chunk.delta; // New in v5
           }
           if (chunk.type === 'finish') {
             usage = {
-              inputTokens: chunk.usage?.
-              outputTokens: chunk.usage?.
+              inputTokens: chunk.usage?.inputTokens,
+              outputTokens: chunk.usage?.outputTokens
             };
             if (chunk.providerMetadata?.openai?.reasoningTokens) {
               usage.reasoningTokens = chunk.providerMetadata.openai.reasoningTokens;
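For reference, the AI SDK v5 stream parts this transform consumes (field shapes as handled above, values illustrative):

// { type: 'text-delta', delta: 'Hel' }            // appended to generatedText
// { type: 'reasoning-delta', delta: 'Because ' }  // appended to reasoningText (new in v5)
// { type: 'finish',
//   usage: { inputTokens: 12, outputTokens: 34 },
//   providerMetadata: { openai: { reasoningTokens: 5 } } }  // recorded as the event's usage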
@@ -1232,6 +1429,25 @@ const createInstrumentationMiddleware = (phClient, model, options) => {
         },
         flush: async () => {
           const latency = (Date.now() - startTime) / 1000;
+          // Build content array similar to mapVercelOutput structure
+          const content = [];
+          if (reasoningText) {
+            content.push({
+              type: 'reasoning',
+              text: truncate(reasoningText)
+            });
+          }
+          if (generatedText) {
+            content.push({
+              type: 'text',
+              text: truncate(generatedText)
+            });
+          }
+          // Structure output like mapVercelOutput does
+          const output = content.length > 0 ? [{
+            role: 'assistant',
+            content: content.length === 1 && content[0].type === 'text' ? content[0].text : content
+          }] : [];
           await sendEventToPosthog({
             client: phClient,
             distinctId: options.posthogDistinctId,
@@ -1239,15 +1455,13 @@ const createInstrumentationMiddleware = (phClient, model, options) => {
             model: modelId,
             provider: provider,
             input: options.posthogPrivacyMode ? '' : mapVercelPrompt(params.prompt),
-            output:
-              content: generatedText,
-              role: 'assistant'
-            }],
+            output: output,
             latency,
             baseURL,
             params: mergedParams,
             httpStatus: 200,
             usage,
+            tools: availableTools,
             captureImmediate: options.posthogCaptureImmediate
           });
         }
@@ -1275,6 +1489,7 @@ const createInstrumentationMiddleware = (phClient, model, options) => {
         },
         isError: true,
         error: truncate(JSON.stringify(error)),
+        tools: availableTools,
         captureImmediate: options.posthogCaptureImmediate
       });
       throw error;
@@ -1290,7 +1505,7 @@ const wrapVercelLanguageModel = (model, phClient, options) => {
     posthogTraceId: traceId,
     posthogDistinctId: options.posthogDistinctId
   });
-  const wrappedModel =
+  const wrappedModel = wrapLanguageModel({
     model,
     middleware
   });
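The wrapper now goes through the AI SDK's own wrapLanguageModel rather than building the wrapped model by hand. A hedged usage sketch, assuming the package's documented withTracing export and an illustrative model id and key:

import { generateText } from 'ai';
import { openai } from '@ai-sdk/openai';
import { PostHog } from 'posthog-node';
import { withTracing } from '@posthog/ai';

const phClient = new PostHog('phc_your_project_key');
// Wrap the model once; generateText/streamText calls through it are captured.
const model = withTracing(openai('gpt-4.1-mini'), phClient, {
  posthogDistinctId: 'user_123'
});
const { text } = await generateText({ model, prompt: 'Say hi' });
await phClient.shutdown();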
@@ -1357,6 +1572,7 @@ class WrappedMessages extends AnthropicOriginal.Messages {
         }
       }
       const latency = (Date.now() - startTime) / 1000;
+      const availableTools = extractAvailableToolCalls('anthropic', anthropicParams);
       await sendEventToPosthog({
         client: this.phClient,
         distinctId: posthogDistinctId,
@@ -1373,6 +1589,7 @@ class WrappedMessages extends AnthropicOriginal.Messages {
         params: body,
         httpStatus: 200,
         usage,
+        tools: availableTools,
         captureImmediate: posthogCaptureImmediate
       });
     } catch (error) {
@@ -1408,6 +1625,7 @@ class WrappedMessages extends AnthropicOriginal.Messages {
     const wrappedPromise = parentPromise.then(async result => {
       if ('content' in result) {
         const latency = (Date.now() - startTime) / 1000;
+        const availableTools = extractAvailableToolCalls('anthropic', anthropicParams);
         await sendEventToPosthog({
           client: this.phClient,
           distinctId: posthogDistinctId,
@@ -1426,6 +1644,7 @@ class WrappedMessages extends AnthropicOriginal.Messages {
             cacheCreationInputTokens: result.usage.cache_creation_input_tokens ?? 0,
             cacheReadInputTokens: result.usage.cache_read_input_tokens ?? 0
           },
+          tools: availableTools,
          captureImmediate: posthogCaptureImmediate
        });
      }
@@ -1488,6 +1707,7 @@ class WrappedModels {
     try {
       const response = await this.client.models.generateContent(geminiParams);
       const latency = (Date.now() - startTime) / 1000;
+      const availableTools = extractAvailableToolCalls('gemini', geminiParams);
       await sendEventToPosthog({
         client: this.phClient,
         distinctId: posthogDistinctId,
@@ -1495,7 +1715,7 @@ class WrappedModels {
         model: geminiParams.model,
         provider: 'gemini',
         input: this.formatInput(geminiParams.contents),
-        output:
+        output: formatResponseGemini(response),
         latency,
         baseURL: 'https://generativelanguage.googleapis.com',
         params: params,
@@ -1504,6 +1724,7 @@ class WrappedModels {
           inputTokens: response.usageMetadata?.promptTokenCount ?? 0,
           outputTokens: response.usageMetadata?.candidatesTokenCount ?? 0
         },
+        tools: availableTools,
         captureImmediate: posthogCaptureImmediate
       });
       return response;
@@ -1563,6 +1784,7 @@ class WrappedModels {
         yield chunk;
       }
       const latency = (Date.now() - startTime) / 1000;
+      const availableTools = extractAvailableToolCalls('gemini', geminiParams);
       await sendEventToPosthog({
         client: this.phClient,
         distinctId: posthogDistinctId,
@@ -1579,6 +1801,7 @@ class WrappedModels {
         params: params,
         httpStatus: 200,
         usage,
+        tools: availableTools,
         captureImmediate: posthogCaptureImmediate
       });
     } catch (error) {
@@ -1660,30 +1883,6 @@ class WrappedModels {
       content: String(contents)
     }];
   }
-  formatOutput(response) {
-    if (response.text) {
-      return [{
-        role: 'assistant',
-        content: response.text
-      }];
-    }
-    if (response.candidates && Array.isArray(response.candidates)) {
-      return response.candidates.map(candidate => {
-        if (candidate.content && candidate.content.parts) {
-          const text = candidate.content.parts.filter(part => part.text).map(part => part.text).join('');
-          return {
-            role: 'assistant',
-            content: text
-          };
-        }
-        return {
-          role: 'assistant',
-          content: String(candidate)
-        };
-      });
-    }
-    return [];
-  }
 }

 function getDefaultExportFromCjs (x) {
@@ -2380,7 +2579,7 @@ class LangChainCallbackHandler extends BaseCallbackHandler {
     };
     if (extraParams) {
       generation.modelParams = getModelParams(extraParams.invocation_params);
-      if (extraParams.invocation_params.tools) {
+      if (extraParams.invocation_params && extraParams.invocation_params.tools) {
         generation.tools = extraParams.invocation_params.tools;
       }
     }
@@ -2489,7 +2688,7 @@ class LangChainCallbackHandler extends BaseCallbackHandler {
       $ai_base_url: run.baseUrl
     };
     if (run.tools) {
-      eventProperties['$ai_tools'] =
+      eventProperties['$ai_tools'] = run.tools;
     }
     if (output instanceof Error) {
       eventProperties['$ai_http_status'] = output.status || 500;
@@ -2511,13 +2710,20 @@ class LangChainCallbackHandler extends BaseCallbackHandler {
     let completions;
     if (output.generations && Array.isArray(output.generations)) {
       const lastGeneration = output.generations[output.generations.length - 1];
-      if (Array.isArray(lastGeneration)) {
-        …
+      if (Array.isArray(lastGeneration) && lastGeneration.length > 0) {
+        // Check if this is a ChatGeneration by looking at the first item
+        const isChatGeneration = 'message' in lastGeneration[0] && lastGeneration[0].message;
+        if (isChatGeneration) {
+          // For ChatGeneration, convert messages to dict format
+          completions = lastGeneration.map(gen => {
+            return this._convertMessageToDict(gen.message);
+          });
+        } else {
+          // For non-ChatGeneration, extract raw response
+          completions = lastGeneration.map(gen => {
+            return this._extractRawResponse(gen);
+          });
+        }
       }
     }
     if (completions) {
@@ -2568,6 +2774,19 @@ class LangChainCallbackHandler extends BaseCallbackHandler {
       }
     }));
   }
+  _extractRawResponse(generation) {
+    // Extract the response from the last response of the LLM call
+    // We return the text of the response if not empty
+    if (generation.text != null && generation.text.trim() !== '') {
+      return generation.text.trim();
+    } else if (generation.message) {
+      // Additional kwargs contains the response in case of tool usage
+      return generation.message.additional_kwargs || generation.message.additionalKwargs || {};
+    } else {
+      // Not tool usage, some LLM responses can be simply empty
+      return '';
+    }
+  }
   _convertMessageToDict(message) {
     let messageDict = {};
     const messageType = message.getType();