@posthog/ai 6.1.1 → 6.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/anthropic/index.cjs +84 -4
- package/dist/anthropic/index.cjs.map +1 -1
- package/dist/anthropic/index.mjs +84 -4
- package/dist/anthropic/index.mjs.map +1 -1
- package/dist/gemini/index.cjs +84 -29
- package/dist/gemini/index.cjs.map +1 -1
- package/dist/gemini/index.d.ts +3 -21
- package/dist/gemini/index.mjs +84 -29
- package/dist/gemini/index.mjs.map +1 -1
- package/dist/index.cjs +380 -68
- package/dist/index.cjs.map +1 -1
- package/dist/index.d.ts +3 -21
- package/dist/index.mjs +367 -55
- package/dist/index.mjs.map +1 -1
- package/dist/langchain/index.cjs +6 -0
- package/dist/langchain/index.cjs.map +1 -1
- package/dist/langchain/index.mjs +6 -0
- package/dist/langchain/index.mjs.map +1 -1
- package/dist/openai/index.cjs +93 -11
- package/dist/openai/index.cjs.map +1 -1
- package/dist/openai/index.mjs +93 -11
- package/dist/openai/index.mjs.map +1 -1
- package/dist/vercel/index.cjs +53 -3
- package/dist/vercel/index.cjs.map +1 -1
- package/dist/vercel/index.mjs +53 -3
- package/dist/vercel/index.mjs.map +1 -1
- package/package.json +3 -3
package/dist/index.cjs
CHANGED
@@ -8,24 +8,26 @@ var AnthropicOriginal = require('@anthropic-ai/sdk');
 var genai = require('@google/genai');
 
 function _interopNamespaceDefault(e) {
-
-
-
-
-
-
-
-
-  });
-  }
+  var n = Object.create(null);
+  if (e) {
+    Object.keys(e).forEach(function (k) {
+      if (k !== 'default') {
+        var d = Object.getOwnPropertyDescriptor(e, k);
+        Object.defineProperty(n, k, d.get ? d : {
+          enumerable: true,
+          get: function () { return e[k]; }
         });
-
-
-
+      }
+    });
+  }
+  n.default = e;
+  return Object.freeze(n);
 }
 
 var uuid__namespace = /*#__PURE__*/_interopNamespaceDefault(uuid);
 
+var version = "6.2.0";
+
 // limit large outputs by truncating to 200kb (approx 200k bytes)
 const MAX_OUTPUT_SIZE = 200000;
 const STRING_FORMAT = 'utf8';
@@ -258,9 +260,8 @@ const extractAvailableToolCalls = (provider, params) => {
     }
     return null;
   } else if (provider === 'vercel') {
-
-
-    return params.mode.tools;
+    if (params.tools) {
+      return params.tools;
     }
     return null;
   }
@@ -334,6 +335,8 @@ const sendEventToPosthog = async ({
     } : {})
   };
   const properties = {
+    $ai_lib: 'posthog-ai',
+    $ai_lib_version: version,
     $ai_provider: params.posthogProviderOverride ?? provider,
     $ai_model: params.posthogModelOverride ?? model,
     $ai_model_parameters: getModelParams(params),
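
Note: every event assembled by sendEventToPosthog now records the capturing library and its version, read from the new top-level `version` constant. A minimal sketch of the resulting property bag, with example values standing in for the fields this hunk does not show:

  // Illustrative only: example properties on a captured generation event after this change.
  const exampleProperties = {
    $ai_lib: 'posthog-ai',
    $ai_lib_version: '6.2.0',           // value of the new `version` constant
    $ai_provider: 'openai',             // example value
    $ai_model: 'gpt-4o-mini',           // example value
    $ai_model_parameters: { temperature: 0.7 },
    // ...input, output, latency, usage and the other existing fields are unchanged
  };
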
@@ -615,14 +618,52 @@ let WrappedCompletions$1 = class WrappedCompletions extends Completions {
       const [stream1, stream2] = value.tee();
       (async () => {
         try {
+          const contentBlocks = [];
           let accumulatedContent = '';
           let usage = {
             inputTokens: 0,
             outputTokens: 0
           };
+          // Map to track in-progress tool calls
+          const toolCallsInProgress = new Map();
           for await (const chunk of stream1) {
-            const
-
+            const choice = chunk?.choices?.[0];
+            // Handle text content
+            const deltaContent = choice?.delta?.content;
+            if (deltaContent) {
+              accumulatedContent += deltaContent;
+            }
+            // Handle tool calls
+            const deltaToolCalls = choice?.delta?.tool_calls;
+            if (deltaToolCalls && Array.isArray(deltaToolCalls)) {
+              for (const toolCall of deltaToolCalls) {
+                const index = toolCall.index;
+                if (index !== undefined) {
+                  if (!toolCallsInProgress.has(index)) {
+                    // New tool call
+                    toolCallsInProgress.set(index, {
+                      id: toolCall.id || '',
+                      name: toolCall.function?.name || '',
+                      arguments: ''
+                    });
+                  }
+                  const inProgressCall = toolCallsInProgress.get(index);
+                  if (inProgressCall) {
+                    // Update tool call data
+                    if (toolCall.id) {
+                      inProgressCall.id = toolCall.id;
+                    }
+                    if (toolCall.function?.name) {
+                      inProgressCall.name = toolCall.function.name;
+                    }
+                    if (toolCall.function?.arguments) {
+                      inProgressCall.arguments += toolCall.function.arguments;
+                    }
+                  }
+                }
+              }
+            }
+            // Handle usage information
            if (chunk.usage) {
              usage = {
                inputTokens: chunk.usage.prompt_tokens ?? 0,
@@ -632,6 +673,37 @@ let WrappedCompletions$1 = class WrappedCompletions extends Completions {
              };
            }
          }
+          // Build final content blocks
+          if (accumulatedContent) {
+            contentBlocks.push({
+              type: 'text',
+              text: accumulatedContent
+            });
+          }
+          // Add completed tool calls to content blocks
+          for (const toolCall of toolCallsInProgress.values()) {
+            if (toolCall.name) {
+              contentBlocks.push({
+                type: 'function',
+                id: toolCall.id,
+                function: {
+                  name: toolCall.name,
+                  arguments: toolCall.arguments
+                }
+              });
+            }
+          }
+          // Format output to match non-streaming version
+          const formattedOutput = contentBlocks.length > 0 ? [{
+            role: 'assistant',
+            content: contentBlocks
+          }] : [{
+            role: 'assistant',
+            content: [{
+              type: 'text',
+              text: ''
+            }]
+          }];
           const latency = (Date.now() - startTime) / 1000;
           const availableTools = extractAvailableToolCalls('openai', openAIParams);
           await sendEventToPosthog({
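
Note: the streamed chat completion is no longer reported as concatenated text only; tool calls are rebuilt from the `choices[0].delta.tool_calls` fragments (keyed by `index`) and emitted next to the text in the final output. An illustrative, standalone sketch of how such fragments accumulate, mirroring the logic above (field values are examples):

  // Illustrative only: three delta fragments for one streamed tool call.
  const fragments = [
    { index: 0, id: 'call_abc', function: { name: 'get_weather', arguments: '' } },
    { index: 0, function: { arguments: '{"city":' } },
    { index: 0, function: { arguments: '"Paris"}' } }
  ];
  const calls = new Map();
  for (const f of fragments) {
    if (!calls.has(f.index)) calls.set(f.index, { id: f.id || '', name: f.function?.name || '', arguments: '' });
    const call = calls.get(f.index);
    if (f.id) call.id = f.id;
    if (f.function?.name) call.name = f.function.name;
    if (f.function?.arguments) call.arguments += f.function.arguments;
  }
  // calls.get(0) -> { id: 'call_abc', name: 'get_weather', arguments: '{"city":"Paris"}' }
  // formattedOutput -> [{ role: 'assistant', content: [{ type: 'function', id: 'call_abc',
  //   function: { name: 'get_weather', arguments: '{"city":"Paris"}' } }] }]
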
@@ -641,10 +713,7 @@ let WrappedCompletions$1 = class WrappedCompletions extends Completions {
             model: openAIParams.model,
             provider: 'openai',
             input: sanitizeOpenAI(openAIParams.messages),
-            output:
-              content: accumulatedContent,
-              role: 'assistant'
-            }],
+            output: formattedOutput,
             latency,
             baseURL: this.baseURL ?? '',
             params: body,
@@ -654,6 +723,7 @@ let WrappedCompletions$1 = class WrappedCompletions extends Completions {
             captureImmediate: posthogCaptureImmediate
           });
         } catch (error) {
+          const httpStatus = error && typeof error === 'object' && 'status' in error ? error.status ?? 500 : 500;
           await sendEventToPosthog({
             client: this.phClient,
             distinctId: posthogDistinctId,
@@ -665,7 +735,7 @@ let WrappedCompletions$1 = class WrappedCompletions extends Completions {
             latency: 0,
             baseURL: this.baseURL ?? '',
             params: body,
-            httpStatus
+            httpStatus,
             usage: {
               inputTokens: 0,
               outputTokens: 0
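
Note: each catch/error path in the wrapped clients now derives `httpStatus` locally from the thrown error before reporting the failed generation. A minimal sketch of what that expression yields, assuming SDK errors that expose a numeric `status` property:

  // Illustrative only: behaviour of the new httpStatus derivation.
  const statusOf = (error) =>
    error && typeof error === 'object' && 'status' in error ? error.status ?? 500 : 500;
  statusOf({ status: 429, message: 'Rate limited' }); // 429
  statusOf(new Error('network down'));                // 500 (no `status` property)
  statusOf({ status: undefined });                    // 500 (nullish status falls back)
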
@@ -710,6 +780,7 @@ let WrappedCompletions$1 = class WrappedCompletions extends Completions {
           }
           return result;
         }, async error => {
+          const httpStatus = error && typeof error === 'object' && 'status' in error ? error.status ?? 500 : 500;
           await sendEventToPosthog({
             client: this.phClient,
             distinctId: posthogDistinctId,
@@ -721,7 +792,7 @@ let WrappedCompletions$1 = class WrappedCompletions extends Completions {
             latency: 0,
             baseURL: this.baseURL ?? '',
             params: body,
-            httpStatus
+            httpStatus,
             usage: {
               inputTokens: 0,
               outputTokens: 0
@@ -800,6 +871,7 @@ let WrappedResponses$1 = class WrappedResponses extends Responses {
             captureImmediate: posthogCaptureImmediate
           });
         } catch (error) {
+          const httpStatus = error && typeof error === 'object' && 'status' in error ? error.status ?? 500 : 500;
           await sendEventToPosthog({
             client: this.phClient,
             distinctId: posthogDistinctId,
@@ -812,7 +884,7 @@ let WrappedResponses$1 = class WrappedResponses extends Responses {
             latency: 0,
             baseURL: this.baseURL ?? '',
             params: body,
-            httpStatus
+            httpStatus,
             usage: {
               inputTokens: 0,
               outputTokens: 0
@@ -859,6 +931,7 @@ let WrappedResponses$1 = class WrappedResponses extends Responses {
           }
           return result;
         }, async error => {
+          const httpStatus = error && typeof error === 'object' && 'status' in error ? error.status ?? 500 : 500;
           await sendEventToPosthog({
             client: this.phClient,
             distinctId: posthogDistinctId,
@@ -871,7 +944,7 @@ let WrappedResponses$1 = class WrappedResponses extends Responses {
             latency: 0,
             baseURL: this.baseURL ?? '',
             params: body,
-            httpStatus
+            httpStatus,
             usage: {
               inputTokens: 0,
               outputTokens: 0
@@ -930,6 +1003,7 @@ let WrappedResponses$1 = class WrappedResponses extends Responses {
           });
           return result;
         }, async error => {
+          const httpStatus = error && typeof error === 'object' && 'status' in error ? error.status ?? 500 : 500;
           await sendEventToPosthog({
             client: this.phClient,
             distinctId: posthogDistinctId,
@@ -942,7 +1016,7 @@ let WrappedResponses$1 = class WrappedResponses extends Responses {
             latency: 0,
             baseURL: this.baseURL ?? '',
             params: body,
-            httpStatus
+            httpStatus,
             usage: {
               inputTokens: 0,
               outputTokens: 0
@@ -1004,14 +1078,52 @@ class WrappedCompletions extends openai.AzureOpenAI.Chat.Completions {
       const [stream1, stream2] = value.tee();
       (async () => {
         try {
+          const contentBlocks = [];
           let accumulatedContent = '';
           let usage = {
             inputTokens: 0,
             outputTokens: 0
           };
+          // Map to track in-progress tool calls
+          const toolCallsInProgress = new Map();
           for await (const chunk of stream1) {
-            const
-
+            const choice = chunk?.choices?.[0];
+            // Handle text content
+            const deltaContent = choice?.delta?.content;
+            if (deltaContent) {
+              accumulatedContent += deltaContent;
+            }
+            // Handle tool calls
+            const deltaToolCalls = choice?.delta?.tool_calls;
+            if (deltaToolCalls && Array.isArray(deltaToolCalls)) {
+              for (const toolCall of deltaToolCalls) {
+                const index = toolCall.index;
+                if (index !== undefined) {
+                  if (!toolCallsInProgress.has(index)) {
+                    // New tool call
+                    toolCallsInProgress.set(index, {
+                      id: toolCall.id || '',
+                      name: toolCall.function?.name || '',
+                      arguments: ''
+                    });
+                  }
+                  const inProgressCall = toolCallsInProgress.get(index);
+                  if (inProgressCall) {
+                    // Update tool call data
+                    if (toolCall.id) {
+                      inProgressCall.id = toolCall.id;
+                    }
+                    if (toolCall.function?.name) {
+                      inProgressCall.name = toolCall.function.name;
+                    }
+                    if (toolCall.function?.arguments) {
+                      inProgressCall.arguments += toolCall.function.arguments;
+                    }
+                  }
+                }
+              }
+            }
+            // Handle usage information
            if (chunk.usage) {
              usage = {
                inputTokens: chunk.usage.prompt_tokens ?? 0,
@@ -1021,6 +1133,37 @@ class WrappedCompletions extends openai.AzureOpenAI.Chat.Completions {
              };
            }
          }
+          // Build final content blocks
+          if (accumulatedContent) {
+            contentBlocks.push({
+              type: 'text',
+              text: accumulatedContent
+            });
+          }
+          // Add completed tool calls to content blocks
+          for (const toolCall of toolCallsInProgress.values()) {
+            if (toolCall.name) {
+              contentBlocks.push({
+                type: 'function',
+                id: toolCall.id,
+                function: {
+                  name: toolCall.name,
+                  arguments: toolCall.arguments
+                }
+              });
+            }
+          }
+          // Format output to match non-streaming version
+          const formattedOutput = contentBlocks.length > 0 ? [{
+            role: 'assistant',
+            content: contentBlocks
+          }] : [{
+            role: 'assistant',
+            content: [{
+              type: 'text',
+              text: ''
+            }]
+          }];
           const latency = (Date.now() - startTime) / 1000;
           await sendEventToPosthog({
             client: this.phClient,
@@ -1029,10 +1172,7 @@ class WrappedCompletions extends openai.AzureOpenAI.Chat.Completions {
             model: openAIParams.model,
             provider: 'azure',
             input: openAIParams.messages,
-            output:
-              content: accumulatedContent,
-              role: 'assistant'
-            }],
+            output: formattedOutput,
             latency,
             baseURL: this.baseURL ?? '',
             params: body,
@@ -1041,6 +1181,7 @@ class WrappedCompletions extends openai.AzureOpenAI.Chat.Completions {
             captureImmediate: posthogCaptureImmediate
           });
         } catch (error) {
+          const httpStatus = error && typeof error === 'object' && 'status' in error ? error.status ?? 500 : 500;
           await sendEventToPosthog({
             client: this.phClient,
             distinctId: posthogDistinctId,
@@ -1052,7 +1193,7 @@ class WrappedCompletions extends openai.AzureOpenAI.Chat.Completions {
             latency: 0,
             baseURL: this.baseURL ?? '',
             params: body,
-            httpStatus
+            httpStatus,
             usage: {
               inputTokens: 0,
               outputTokens: 0
@@ -1095,6 +1236,7 @@ class WrappedCompletions extends openai.AzureOpenAI.Chat.Completions {
           }
           return result;
         }, async error => {
+          const httpStatus = error && typeof error === 'object' && 'status' in error ? error.status ?? 500 : 500;
           await sendEventToPosthog({
             client: this.phClient,
             distinctId: posthogDistinctId,
@@ -1106,7 +1248,7 @@ class WrappedCompletions extends openai.AzureOpenAI.Chat.Completions {
             latency: 0,
             baseURL: this.baseURL ?? '',
             params: body,
-            httpStatus
+            httpStatus,
             usage: {
               inputTokens: 0,
               outputTokens: 0
@@ -1183,6 +1325,7 @@ class WrappedResponses extends openai.AzureOpenAI.Responses {
             captureImmediate: posthogCaptureImmediate
           });
         } catch (error) {
+          const httpStatus = error && typeof error === 'object' && 'status' in error ? error.status ?? 500 : 500;
           await sendEventToPosthog({
             client: this.phClient,
             distinctId: posthogDistinctId,
@@ -1195,7 +1338,7 @@ class WrappedResponses extends openai.AzureOpenAI.Responses {
             latency: 0,
             baseURL: this.baseURL ?? '',
             params: body,
-            httpStatus
+            httpStatus,
             usage: {
               inputTokens: 0,
               outputTokens: 0
@@ -1238,6 +1381,7 @@ class WrappedResponses extends openai.AzureOpenAI.Responses {
           }
           return result;
         }, async error => {
+          const httpStatus = error && typeof error === 'object' && 'status' in error ? error.status ?? 500 : 500;
           await sendEventToPosthog({
             client: this.phClient,
             distinctId: posthogDistinctId,
@@ -1250,7 +1394,7 @@ class WrappedResponses extends openai.AzureOpenAI.Responses {
             latency: 0,
             baseURL: this.baseURL ?? '',
             params: body,
-            httpStatus
+            httpStatus,
             usage: {
               inputTokens: 0,
               outputTokens: 0
@@ -1617,6 +1761,8 @@ const createInstrumentationMiddleware = (phClient, model, options) => {
     const provider = options.posthogProviderOverride ?? extractProvider(model);
     const availableTools = extractAvailableToolCalls('vercel', params);
     const baseURL = ''; // cannot currently get baseURL from vercel
+    // Map to track in-progress tool calls
+    const toolCallsInProgress = new Map();
     try {
       const {
         stream,
@@ -1631,6 +1777,34 @@ const createInstrumentationMiddleware = (phClient, model, options) => {
         if (chunk.type === 'reasoning-delta') {
           reasoningText += chunk.delta; // New in v5
         }
+        // Handle tool call chunks
+        if (chunk.type === 'tool-input-start') {
+          // Initialize a new tool call
+          toolCallsInProgress.set(chunk.id, {
+            toolCallId: chunk.id,
+            toolName: chunk.toolName,
+            input: ''
+          });
+        }
+        if (chunk.type === 'tool-input-delta') {
+          // Accumulate tool call arguments
+          const toolCall = toolCallsInProgress.get(chunk.id);
+          if (toolCall) {
+            toolCall.input += chunk.delta;
+          }
+        }
+        if (chunk.type === 'tool-input-end') {
+          // Tool call is complete, keep it in the map for final processing
+          // Nothing specific to do here, the tool call is already complete
+        }
+        if (chunk.type === 'tool-call') {
+          // Direct tool call chunk (complete tool call)
+          toolCallsInProgress.set(chunk.toolCallId, {
+            toolCallId: chunk.toolCallId,
+            toolName: chunk.toolName,
+            input: chunk.input
+          });
+        }
         if (chunk.type === 'finish') {
           const providerMetadata = chunk.providerMetadata;
           const additionalTokenValues = {
@@ -1664,6 +1838,19 @@ const createInstrumentationMiddleware = (phClient, model, options) => {
            text: truncate(generatedText)
          });
        }
+      // Add completed tool calls to content
+      for (const toolCall of toolCallsInProgress.values()) {
+        if (toolCall.toolName) {
+          content.push({
+            type: 'tool-call',
+            id: toolCall.toolCallId,
+            function: {
+              name: toolCall.toolName,
+              arguments: toolCall.input
+            }
+          });
+        }
+      }
       // Structure output like mapVercelOutput does
       const output = content.length > 0 ? [{
         role: 'assistant',
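
Note: the Vercel AI SDK middleware now understands the v5-style tool streaming parts (`tool-input-start`, `tool-input-delta`, `tool-input-end`, and complete `tool-call` chunks) and folds finished calls into the captured output. An illustrative, standalone sketch of one such sequence and the accumulation performed above (names and values are examples):

  // Illustrative only: a representative stream-part sequence for one tool call.
  const parts = [
    { type: 'tool-input-start', id: 'tool_1', toolName: 'lookupUser' },
    { type: 'tool-input-delta', id: 'tool_1', delta: '{"userId":' },
    { type: 'tool-input-delta', id: 'tool_1', delta: '"42"}' },
    { type: 'tool-input-end', id: 'tool_1' }
  ];
  const inProgress = new Map();
  for (const part of parts) {
    if (part.type === 'tool-input-start') inProgress.set(part.id, { toolCallId: part.id, toolName: part.toolName, input: '' });
    if (part.type === 'tool-input-delta') inProgress.get(part.id).input += part.delta;
  }
  // inProgress.get('tool_1') -> { toolCallId: 'tool_1', toolName: 'lookupUser', input: '{"userId":"42"}' }
  // which is reported as { type: 'tool-call', id: 'tool_1',
  //   function: { name: 'lookupUser', arguments: '{"userId":"42"}' } }
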
@@ -1766,6 +1953,9 @@ class WrappedMessages extends AnthropicOriginal.Messages {
     if (anthropicParams.stream) {
       return parentPromise.then(value => {
         let accumulatedContent = '';
+        const contentBlocks = [];
+        const toolsInProgress = new Map();
+        let currentTextBlock = null;
         const usage = {
           inputTokens: 0,
           outputTokens: 0,
@@ -1777,10 +1967,70 @@ class WrappedMessages extends AnthropicOriginal.Messages {
         (async () => {
           try {
             for await (const chunk of stream1) {
+              // Handle content block start events
+              if (chunk.type === 'content_block_start') {
+                if (chunk.content_block?.type === 'text') {
+                  currentTextBlock = {
+                    type: 'text',
+                    text: ''
+                  };
+                  contentBlocks.push(currentTextBlock);
+                } else if (chunk.content_block?.type === 'tool_use') {
+                  const toolBlock = {
+                    type: 'function',
+                    id: chunk.content_block.id,
+                    function: {
+                      name: chunk.content_block.name,
+                      arguments: {}
+                    }
+                  };
+                  contentBlocks.push(toolBlock);
+                  toolsInProgress.set(chunk.content_block.id, {
+                    block: toolBlock,
+                    inputString: ''
+                  });
+                  currentTextBlock = null;
+                }
+              }
+              // Handle text delta events
              if ('delta' in chunk) {
                if ('text' in chunk.delta) {
                  const delta = chunk?.delta?.text ?? '';
                  accumulatedContent += delta;
+                  if (currentTextBlock) {
+                    currentTextBlock.text += delta;
+                  }
+                }
+              }
+              // Handle tool input delta events
+              if (chunk.type === 'content_block_delta' && chunk.delta?.type === 'input_json_delta') {
+                const block = chunk.index !== undefined ? contentBlocks[chunk.index] : undefined;
+                const toolId = block?.type === 'function' ? block.id : undefined;
+                if (toolId && toolsInProgress.has(toolId)) {
+                  const tool = toolsInProgress.get(toolId);
+                  if (tool) {
+                    tool.inputString += chunk.delta.partial_json || '';
+                  }
+                }
+              }
+              // Handle content block stop events
+              if (chunk.type === 'content_block_stop') {
+                currentTextBlock = null;
+                // Parse accumulated tool input
+                if (chunk.index !== undefined) {
+                  const block = contentBlocks[chunk.index];
+                  if (block?.type === 'function' && block.id && toolsInProgress.has(block.id)) {
+                    const tool = toolsInProgress.get(block.id);
+                    if (tool) {
+                      try {
+                        block.function.arguments = JSON.parse(tool.inputString);
+                      } catch (e) {
+                        // Keep empty object if parsing fails
+                        console.error('Error parsing tool input:', e);
+                      }
+                    }
+                    toolsInProgress.delete(block.id);
+                  }
                }
              }
              if (chunk.type == 'message_start') {
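
Note: the Anthropic wrapper now reconstructs tool_use blocks from the streaming events (`content_block_start`, `input_json_delta` deltas, `content_block_stop`) instead of tracking text only. An illustrative, standalone sketch of one such event sequence and how its JSON input is reassembled (ids and values are examples):

  // Illustrative only: representative events for a single streamed tool_use block.
  const events = [
    { type: 'content_block_start', index: 0, content_block: { type: 'tool_use', id: 'toolu_1', name: 'get_weather' } },
    { type: 'content_block_delta', index: 0, delta: { type: 'input_json_delta', partial_json: '{"city":"Par' } },
    { type: 'content_block_delta', index: 0, delta: { type: 'input_json_delta', partial_json: 'is"}' } },
    { type: 'content_block_stop', index: 0 }
  ];
  let inputString = '';
  for (const event of events) {
    if (event.type === 'content_block_delta' && event.delta.type === 'input_json_delta') {
      inputString += event.delta.partial_json;
    }
  }
  // JSON.parse(inputString) -> { city: 'Paris' }, stored on the matching content block as
  // { type: 'function', id: 'toolu_1', function: { name: 'get_weather', arguments: { city: 'Paris' } } }
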
@@ -1794,6 +2044,17 @@ class WrappedMessages extends AnthropicOriginal.Messages {
           }
           const latency = (Date.now() - startTime) / 1000;
           const availableTools = extractAvailableToolCalls('anthropic', anthropicParams);
+          // Format output to match non-streaming version
+          const formattedOutput = contentBlocks.length > 0 ? [{
+            role: 'assistant',
+            content: contentBlocks
+          }] : [{
+            role: 'assistant',
+            content: [{
+              type: 'text',
+              text: accumulatedContent
+            }]
+          }];
           await sendEventToPosthog({
             client: this.phClient,
             distinctId: posthogDistinctId,
@@ -1801,10 +2062,7 @@ class WrappedMessages extends AnthropicOriginal.Messages {
             model: anthropicParams.model,
             provider: 'anthropic',
             input: sanitizeAnthropic(mergeSystemPrompt(anthropicParams, 'anthropic')),
-            output:
-              content: accumulatedContent,
-              role: 'assistant'
-            }],
+            output: formattedOutput,
             latency,
             baseURL: this.baseURL ?? '',
             params: body,
@@ -1929,6 +2187,7 @@ class WrappedModels {
       const response = await this.client.models.generateContent(geminiParams);
       const latency = (Date.now() - startTime) / 1000;
       const availableTools = extractAvailableToolCalls('gemini', geminiParams);
+      const metadata = response.usageMetadata;
       await sendEventToPosthog({
         client: this.phClient,
         distinctId: posthogDistinctId,
@@ -1942,10 +2201,10 @@ class WrappedModels {
         params: params,
         httpStatus: 200,
         usage: {
-          inputTokens:
-          outputTokens:
-          reasoningTokens:
-          cacheReadInputTokens:
+          inputTokens: metadata?.promptTokenCount ?? 0,
+          outputTokens: metadata?.candidatesTokenCount ?? 0,
+          reasoningTokens: metadata?.thoughtsTokenCount ?? 0,
+          cacheReadInputTokens: metadata?.cachedContentTokenCount ?? 0
         },
         tools: availableTools,
         captureImmediate: posthogCaptureImmediate
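
Note: both the non-streaming and (further down) the streaming Gemini wrappers now read token counts from the response's `usageMetadata`. A minimal sketch of the mapping, with example values:

  // Illustrative only: how a usageMetadata payload maps onto the captured usage.
  const metadata = {
    promptTokenCount: 120,
    candidatesTokenCount: 45,
    thoughtsTokenCount: 12,
    cachedContentTokenCount: 30
  };
  const usage = {
    inputTokens: metadata.promptTokenCount ?? 0,                 // 120
    outputTokens: metadata.candidatesTokenCount ?? 0,            // 45
    reasoningTokens: metadata.thoughtsTokenCount ?? 0,           // 12
    cacheReadInputTokens: metadata.cachedContentTokenCount ?? 0  // 30
  };
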
@@ -1987,7 +2246,7 @@ class WrappedModels {
     } = params;
     const traceId = posthogTraceId ?? uuid.v4();
     const startTime = Date.now();
-
+    const accumulatedContent = [];
     let usage = {
       inputTokens: 0,
       outputTokens: 0
@@ -1995,21 +2254,66 @@ class WrappedModels {
     try {
       const stream = await this.client.models.generateContentStream(geminiParams);
       for await (const chunk of stream) {
+        // Handle text content
         if (chunk.text) {
-
+          // Find if we already have a text item to append to
+          let lastTextItem;
+          for (let i = accumulatedContent.length - 1; i >= 0; i--) {
+            if (accumulatedContent[i].type === 'text') {
+              lastTextItem = accumulatedContent[i];
+              break;
+            }
+          }
+          if (lastTextItem && lastTextItem.type === 'text') {
+            lastTextItem.text += chunk.text;
+          } else {
+            accumulatedContent.push({
+              type: 'text',
+              text: chunk.text
+            });
+          }
+        }
+        // Handle function calls from candidates
+        if (chunk.candidates && Array.isArray(chunk.candidates)) {
+          for (const candidate of chunk.candidates) {
+            if (candidate.content && candidate.content.parts) {
+              for (const part of candidate.content.parts) {
+                // Type-safe check for functionCall
+                if ('functionCall' in part) {
+                  const funcCall = part.functionCall;
+                  if (funcCall?.name) {
+                    accumulatedContent.push({
+                      type: 'function',
+                      function: {
+                        name: funcCall.name,
+                        arguments: funcCall.args || {}
+                      }
+                    });
+                  }
+                }
+              }
+            }
+          }
         }
+        // Update usage metadata - handle both old and new field names
         if (chunk.usageMetadata) {
+          const metadata = chunk.usageMetadata;
          usage = {
-            inputTokens:
-            outputTokens:
-            reasoningTokens:
-            cacheReadInputTokens:
+            inputTokens: metadata.promptTokenCount ?? 0,
+            outputTokens: metadata.candidatesTokenCount ?? 0,
+            reasoningTokens: metadata.thoughtsTokenCount ?? 0,
+            cacheReadInputTokens: metadata.cachedContentTokenCount ?? 0
          };
        }
        yield chunk;
      }
      const latency = (Date.now() - startTime) / 1000;
      const availableTools = extractAvailableToolCalls('gemini', geminiParams);
+      // Format output similar to formatResponseGemini
+      const output = accumulatedContent.length > 0 ? [{
+        role: 'assistant',
+        content: accumulatedContent
+      }] : [];
      await sendEventToPosthog({
        client: this.phClient,
        distinctId: posthogDistinctId,
@@ -2017,10 +2321,7 @@ class WrappedModels {
        model: geminiParams.model,
        provider: 'gemini',
        input: this.formatInputForPostHog(geminiParams.contents),
-        output
-          content: accumulatedContent,
-          role: 'assistant'
-        }],
+        output,
        latency,
        baseURL: 'https://generativelanguage.googleapis.com',
        params: params,
@@ -2070,22 +2371,28 @@ class WrappedModels {
         };
       }
       if (item && typeof item === 'object') {
-
+        const obj = item;
+        if ('text' in obj && obj.text) {
         return {
-          role:
-          content:
+          role: obj.role || 'user',
+          content: obj.text
         };
         }
-        if (
+        if ('content' in obj && obj.content) {
         return {
-          role:
-          content:
+          role: obj.role || 'user',
+          content: obj.content
         };
         }
-        if (
+        if ('parts' in obj && Array.isArray(obj.parts)) {
         return {
-          role:
-          content:
+          role: obj.role || 'user',
+          content: obj.parts.map(part => {
+            if (part && typeof part === 'object' && 'text' in part) {
+              return part.text;
+            }
+            return part;
+          })
         };
         }
       }
@@ -2096,16 +2403,17 @@ class WrappedModels {
     });
     }
     if (contents && typeof contents === 'object') {
-
+      const obj = contents;
+      if ('text' in obj && obj.text) {
       return [{
         role: 'user',
-        content:
+        content: obj.text
       }];
       }
-      if (
+      if ('content' in obj && obj.content) {
       return [{
         role: 'user',
-        content:
+        content: obj.content
       }];
       }
     }
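
Note: the Gemini input formatter now narrows arbitrary `contents` values into `{ role, content }` pairs before they are sent to PostHog. An annotated, illustrative sketch of the shapes the branches above produce (inputs are example values, not executed against the real class):

  // Array item with Gemini-style parts:
  //   { role: 'model', parts: [{ text: 'Hi' }, { text: 'there' }] }
  //     -> { role: 'model', content: ['Hi', 'there'] }
  // Bare object with text:
  //   { text: 'Hello' }      -> [{ role: 'user', content: 'Hello' }]
  // Bare object with content:
  //   { content: 'Hello' }   -> [{ role: 'user', content: 'Hello' }]
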
@@ -2870,6 +3178,8 @@ class LangChainCallbackHandler extends BaseCallbackHandler {
     const eventName = parentRunId ? '$ai_span' : '$ai_trace';
     const latency = run.endTime ? (run.endTime - run.startTime) / 1000 : 0;
     const eventProperties = {
+      $ai_lib: 'posthog-ai',
+      $ai_lib_version: version,
       $ai_trace_id: traceId,
       $ai_input_state: withPrivacyMode(this.client, this.privacyMode, run.input),
       $ai_latency: latency,
@@ -2910,6 +3220,8 @@ class LangChainCallbackHandler extends BaseCallbackHandler {
   _captureGeneration(traceId, runId, run, output, parentRunId) {
     const latency = run.endTime ? (run.endTime - run.startTime) / 1000 : 0;
     const eventProperties = {
+      $ai_lib: 'posthog-ai',
+      $ai_lib_version: version,
       $ai_trace_id: traceId,
       $ai_span_id: runId,
       $ai_span_name: run.name,