@posthog/ai 6.1.1 → 6.1.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/anthropic/index.cjs +80 -4
- package/dist/anthropic/index.cjs.map +1 -1
- package/dist/anthropic/index.mjs +80 -4
- package/dist/anthropic/index.mjs.map +1 -1
- package/dist/gemini/index.cjs +80 -29
- package/dist/gemini/index.cjs.map +1 -1
- package/dist/gemini/index.d.ts +3 -21
- package/dist/gemini/index.mjs +80 -29
- package/dist/gemini/index.mjs.map +1 -1
- package/dist/index.cjs +359 -55
- package/dist/index.cjs.map +1 -1
- package/dist/index.d.ts +3 -21
- package/dist/index.mjs +359 -55
- package/dist/index.mjs.map +1 -1
- package/dist/langchain/index.cjs.map +1 -1
- package/dist/langchain/index.mjs.map +1 -1
- package/dist/openai/index.cjs +89 -11
- package/dist/openai/index.cjs.map +1 -1
- package/dist/openai/index.mjs +89 -11
- package/dist/openai/index.mjs.map +1 -1
- package/dist/vercel/index.cjs +49 -3
- package/dist/vercel/index.cjs.map +1 -1
- package/dist/vercel/index.mjs +49 -3
- package/dist/vercel/index.mjs.map +1 -1
- package/package.json +3 -3
package/dist/index.mjs CHANGED
@@ -238,9 +238,8 @@ const extractAvailableToolCalls = (provider, params) => {
     }
     return null;
   } else if (provider === 'vercel') {
-
-
-      return params.mode.tools;
+    if (params.tools) {
+      return params.tools;
     }
     return null;
   }
@@ -595,14 +594,52 @@ let WrappedCompletions$1 = class WrappedCompletions extends Completions {
         const [stream1, stream2] = value.tee();
         (async () => {
           try {
+            const contentBlocks = [];
             let accumulatedContent = '';
             let usage = {
               inputTokens: 0,
               outputTokens: 0
             };
+            // Map to track in-progress tool calls
+            const toolCallsInProgress = new Map();
             for await (const chunk of stream1) {
-              const
-
+              const choice = chunk?.choices?.[0];
+              // Handle text content
+              const deltaContent = choice?.delta?.content;
+              if (deltaContent) {
+                accumulatedContent += deltaContent;
+              }
+              // Handle tool calls
+              const deltaToolCalls = choice?.delta?.tool_calls;
+              if (deltaToolCalls && Array.isArray(deltaToolCalls)) {
+                for (const toolCall of deltaToolCalls) {
+                  const index = toolCall.index;
+                  if (index !== undefined) {
+                    if (!toolCallsInProgress.has(index)) {
+                      // New tool call
+                      toolCallsInProgress.set(index, {
+                        id: toolCall.id || '',
+                        name: toolCall.function?.name || '',
+                        arguments: ''
+                      });
+                    }
+                    const inProgressCall = toolCallsInProgress.get(index);
+                    if (inProgressCall) {
+                      // Update tool call data
+                      if (toolCall.id) {
+                        inProgressCall.id = toolCall.id;
+                      }
+                      if (toolCall.function?.name) {
+                        inProgressCall.name = toolCall.function.name;
+                      }
+                      if (toolCall.function?.arguments) {
+                        inProgressCall.arguments += toolCall.function.arguments;
+                      }
+                    }
+                  }
+                }
+              }
+              // Handle usage information
               if (chunk.usage) {
                 usage = {
                   inputTokens: chunk.usage.prompt_tokens ?? 0,
@@ -612,6 +649,37 @@ let WrappedCompletions$1 = class WrappedCompletions extends Completions {
                 };
               }
             }
+            // Build final content blocks
+            if (accumulatedContent) {
+              contentBlocks.push({
+                type: 'text',
+                text: accumulatedContent
+              });
+            }
+            // Add completed tool calls to content blocks
+            for (const toolCall of toolCallsInProgress.values()) {
+              if (toolCall.name) {
+                contentBlocks.push({
+                  type: 'function',
+                  id: toolCall.id,
+                  function: {
+                    name: toolCall.name,
+                    arguments: toolCall.arguments
+                  }
+                });
+              }
+            }
+            // Format output to match non-streaming version
+            const formattedOutput = contentBlocks.length > 0 ? [{
+              role: 'assistant',
+              content: contentBlocks
+            }] : [{
+              role: 'assistant',
+              content: [{
+                type: 'text',
+                text: ''
+              }]
+            }];
             const latency = (Date.now() - startTime) / 1000;
             const availableTools = extractAvailableToolCalls('openai', openAIParams);
             await sendEventToPosthog({
@@ -621,10 +689,7 @@ let WrappedCompletions$1 = class WrappedCompletions extends Completions {
               model: openAIParams.model,
               provider: 'openai',
               input: sanitizeOpenAI(openAIParams.messages),
-              output:
-                content: accumulatedContent,
-                role: 'assistant'
-              }],
+              output: formattedOutput,
               latency,
               baseURL: this.baseURL ?? '',
               params: body,
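In 6.1.2 the OpenAI streaming path no longer reports only the concatenated text: it also rebuilds tool calls from the streamed `delta.tool_calls` fragments, keyed by each fragment's `index`, and includes them as `function` content blocks in the event sent to PostHog. A minimal sketch of that accumulation pattern, run here against a hypothetical hard-coded list of chunks rather than a live OpenAI stream:

```js
// Hypothetical pre-recorded chunks; in real use they come from
// `for await (const chunk of stream)` on an OpenAI streaming response.
const chunks = [
  { choices: [{ delta: { tool_calls: [{ index: 0, id: 'call_1', function: { name: 'get_weather', arguments: '' } }] } }] },
  { choices: [{ delta: { tool_calls: [{ index: 0, function: { arguments: '{"city":' } }] } }] },
  { choices: [{ delta: { tool_calls: [{ index: 0, function: { arguments: '"Paris"}' } }] } }] },
];

const toolCallsInProgress = new Map();
for (const chunk of chunks) {
  const deltaToolCalls = chunk?.choices?.[0]?.delta?.tool_calls;
  if (!Array.isArray(deltaToolCalls)) continue;
  for (const toolCall of deltaToolCalls) {
    if (toolCall.index === undefined) continue;
    // The first fragment for an index creates the entry; later fragments
    // only append to `arguments`, because the JSON is streamed piecewise.
    if (!toolCallsInProgress.has(toolCall.index)) {
      toolCallsInProgress.set(toolCall.index, { id: toolCall.id || '', name: toolCall.function?.name || '', arguments: '' });
    }
    const call = toolCallsInProgress.get(toolCall.index);
    if (toolCall.id) call.id = toolCall.id;
    if (toolCall.function?.name) call.name = toolCall.function.name;
    if (toolCall.function?.arguments) call.arguments += toolCall.function.arguments;
  }
}

console.log([...toolCallsInProgress.values()]);
// [{ id: 'call_1', name: 'get_weather', arguments: '{"city":"Paris"}' }]
```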
@@ -634,6 +699,7 @@ let WrappedCompletions$1 = class WrappedCompletions extends Completions {
               captureImmediate: posthogCaptureImmediate
             });
           } catch (error) {
+            const httpStatus = error && typeof error === 'object' && 'status' in error ? error.status ?? 500 : 500;
             await sendEventToPosthog({
               client: this.phClient,
               distinctId: posthogDistinctId,
@@ -645,7 +711,7 @@ let WrappedCompletions$1 = class WrappedCompletions extends Completions {
               latency: 0,
               baseURL: this.baseURL ?? '',
               params: body,
-              httpStatus
+              httpStatus,
               usage: {
                 inputTokens: 0,
                 outputTokens: 0
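The error paths change in the same way throughout the file: each `catch` block and rejection handler now derives an HTTP status defensively before reporting it, instead of assuming the thrown value carries one. In isolation the check reads as below; the `httpStatusOf` wrapper is only an illustration, not something the package exports:

```js
// `error` is effectively unknown, so check it is an object with a `status`
// property before reading it, and fall back to 500 otherwise.
const httpStatusOf = error =>
  error && typeof error === 'object' && 'status' in error ? error.status ?? 500 : 500;

console.log(httpStatusOf({ status: 429 }));      // 429
console.log(httpStatusOf(new Error('boom')));    // 500 (no status property)
console.log(httpStatusOf(null));                 // 500
```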
@@ -690,6 +756,7 @@ let WrappedCompletions$1 = class WrappedCompletions extends Completions {
           }
           return result;
         }, async error => {
+          const httpStatus = error && typeof error === 'object' && 'status' in error ? error.status ?? 500 : 500;
           await sendEventToPosthog({
             client: this.phClient,
             distinctId: posthogDistinctId,
@@ -701,7 +768,7 @@ let WrappedCompletions$1 = class WrappedCompletions extends Completions {
             latency: 0,
             baseURL: this.baseURL ?? '',
             params: body,
-            httpStatus
+            httpStatus,
             usage: {
               inputTokens: 0,
               outputTokens: 0
@@ -780,6 +847,7 @@ let WrappedResponses$1 = class WrappedResponses extends Responses {
               captureImmediate: posthogCaptureImmediate
             });
           } catch (error) {
+            const httpStatus = error && typeof error === 'object' && 'status' in error ? error.status ?? 500 : 500;
             await sendEventToPosthog({
               client: this.phClient,
               distinctId: posthogDistinctId,
@@ -792,7 +860,7 @@ let WrappedResponses$1 = class WrappedResponses extends Responses {
               latency: 0,
               baseURL: this.baseURL ?? '',
               params: body,
-              httpStatus
+              httpStatus,
               usage: {
                 inputTokens: 0,
                 outputTokens: 0
@@ -839,6 +907,7 @@ let WrappedResponses$1 = class WrappedResponses extends Responses {
           }
           return result;
         }, async error => {
+          const httpStatus = error && typeof error === 'object' && 'status' in error ? error.status ?? 500 : 500;
           await sendEventToPosthog({
             client: this.phClient,
             distinctId: posthogDistinctId,
@@ -851,7 +920,7 @@ let WrappedResponses$1 = class WrappedResponses extends Responses {
             latency: 0,
             baseURL: this.baseURL ?? '',
             params: body,
-            httpStatus
+            httpStatus,
             usage: {
               inputTokens: 0,
               outputTokens: 0
@@ -910,6 +979,7 @@ let WrappedResponses$1 = class WrappedResponses extends Responses {
           });
           return result;
         }, async error => {
+          const httpStatus = error && typeof error === 'object' && 'status' in error ? error.status ?? 500 : 500;
           await sendEventToPosthog({
             client: this.phClient,
             distinctId: posthogDistinctId,
@@ -922,7 +992,7 @@ let WrappedResponses$1 = class WrappedResponses extends Responses {
             latency: 0,
             baseURL: this.baseURL ?? '',
             params: body,
-            httpStatus
+            httpStatus,
             usage: {
               inputTokens: 0,
               outputTokens: 0
@@ -984,14 +1054,52 @@ class WrappedCompletions extends AzureOpenAI.Chat.Completions {
         const [stream1, stream2] = value.tee();
         (async () => {
           try {
+            const contentBlocks = [];
             let accumulatedContent = '';
             let usage = {
               inputTokens: 0,
               outputTokens: 0
             };
+            // Map to track in-progress tool calls
+            const toolCallsInProgress = new Map();
             for await (const chunk of stream1) {
-              const
-
+              const choice = chunk?.choices?.[0];
+              // Handle text content
+              const deltaContent = choice?.delta?.content;
+              if (deltaContent) {
+                accumulatedContent += deltaContent;
+              }
+              // Handle tool calls
+              const deltaToolCalls = choice?.delta?.tool_calls;
+              if (deltaToolCalls && Array.isArray(deltaToolCalls)) {
+                for (const toolCall of deltaToolCalls) {
+                  const index = toolCall.index;
+                  if (index !== undefined) {
+                    if (!toolCallsInProgress.has(index)) {
+                      // New tool call
+                      toolCallsInProgress.set(index, {
+                        id: toolCall.id || '',
+                        name: toolCall.function?.name || '',
+                        arguments: ''
+                      });
+                    }
+                    const inProgressCall = toolCallsInProgress.get(index);
+                    if (inProgressCall) {
+                      // Update tool call data
+                      if (toolCall.id) {
+                        inProgressCall.id = toolCall.id;
+                      }
+                      if (toolCall.function?.name) {
+                        inProgressCall.name = toolCall.function.name;
+                      }
+                      if (toolCall.function?.arguments) {
+                        inProgressCall.arguments += toolCall.function.arguments;
+                      }
+                    }
+                  }
+                }
+              }
+              // Handle usage information
              if (chunk.usage) {
                usage = {
                  inputTokens: chunk.usage.prompt_tokens ?? 0,
@@ -1001,6 +1109,37 @@ class WrappedCompletions extends AzureOpenAI.Chat.Completions {
                };
              }
            }
+            // Build final content blocks
+            if (accumulatedContent) {
+              contentBlocks.push({
+                type: 'text',
+                text: accumulatedContent
+              });
+            }
+            // Add completed tool calls to content blocks
+            for (const toolCall of toolCallsInProgress.values()) {
+              if (toolCall.name) {
+                contentBlocks.push({
+                  type: 'function',
+                  id: toolCall.id,
+                  function: {
+                    name: toolCall.name,
+                    arguments: toolCall.arguments
+                  }
+                });
+              }
+            }
+            // Format output to match non-streaming version
+            const formattedOutput = contentBlocks.length > 0 ? [{
+              role: 'assistant',
+              content: contentBlocks
+            }] : [{
+              role: 'assistant',
+              content: [{
+                type: 'text',
+                text: ''
+              }]
+            }];
             const latency = (Date.now() - startTime) / 1000;
             await sendEventToPosthog({
               client: this.phClient,
@@ -1009,10 +1148,7 @@ class WrappedCompletions extends AzureOpenAI.Chat.Completions {
               model: openAIParams.model,
               provider: 'azure',
               input: openAIParams.messages,
-              output:
-                content: accumulatedContent,
-                role: 'assistant'
-              }],
+              output: formattedOutput,
               latency,
               baseURL: this.baseURL ?? '',
               params: body,
@@ -1021,6 +1157,7 @@ class WrappedCompletions extends AzureOpenAI.Chat.Completions {
               captureImmediate: posthogCaptureImmediate
             });
           } catch (error) {
+            const httpStatus = error && typeof error === 'object' && 'status' in error ? error.status ?? 500 : 500;
             await sendEventToPosthog({
               client: this.phClient,
               distinctId: posthogDistinctId,
@@ -1032,7 +1169,7 @@ class WrappedCompletions extends AzureOpenAI.Chat.Completions {
               latency: 0,
               baseURL: this.baseURL ?? '',
               params: body,
-              httpStatus
+              httpStatus,
               usage: {
                 inputTokens: 0,
                 outputTokens: 0
@@ -1075,6 +1212,7 @@ class WrappedCompletions extends AzureOpenAI.Chat.Completions {
           }
           return result;
         }, async error => {
+          const httpStatus = error && typeof error === 'object' && 'status' in error ? error.status ?? 500 : 500;
           await sendEventToPosthog({
             client: this.phClient,
             distinctId: posthogDistinctId,
@@ -1086,7 +1224,7 @@ class WrappedCompletions extends AzureOpenAI.Chat.Completions {
             latency: 0,
             baseURL: this.baseURL ?? '',
             params: body,
-            httpStatus
+            httpStatus,
             usage: {
               inputTokens: 0,
               outputTokens: 0
@@ -1163,6 +1301,7 @@ class WrappedResponses extends AzureOpenAI.Responses {
               captureImmediate: posthogCaptureImmediate
             });
           } catch (error) {
+            const httpStatus = error && typeof error === 'object' && 'status' in error ? error.status ?? 500 : 500;
             await sendEventToPosthog({
               client: this.phClient,
               distinctId: posthogDistinctId,
@@ -1175,7 +1314,7 @@ class WrappedResponses extends AzureOpenAI.Responses {
               latency: 0,
               baseURL: this.baseURL ?? '',
               params: body,
-              httpStatus
+              httpStatus,
               usage: {
                 inputTokens: 0,
                 outputTokens: 0
@@ -1218,6 +1357,7 @@ class WrappedResponses extends AzureOpenAI.Responses {
           }
           return result;
         }, async error => {
+          const httpStatus = error && typeof error === 'object' && 'status' in error ? error.status ?? 500 : 500;
           await sendEventToPosthog({
             client: this.phClient,
             distinctId: posthogDistinctId,
@@ -1230,7 +1370,7 @@ class WrappedResponses extends AzureOpenAI.Responses {
             latency: 0,
             baseURL: this.baseURL ?? '',
             params: body,
-            httpStatus
+            httpStatus,
             usage: {
               inputTokens: 0,
               outputTokens: 0
@@ -1597,6 +1737,8 @@ const createInstrumentationMiddleware = (phClient, model, options) => {
     const provider = options.posthogProviderOverride ?? extractProvider(model);
     const availableTools = extractAvailableToolCalls('vercel', params);
     const baseURL = ''; // cannot currently get baseURL from vercel
+    // Map to track in-progress tool calls
+    const toolCallsInProgress = new Map();
     try {
       const {
         stream,
@@ -1611,6 +1753,34 @@ const createInstrumentationMiddleware = (phClient, model, options) => {
           if (chunk.type === 'reasoning-delta') {
             reasoningText += chunk.delta; // New in v5
           }
+          // Handle tool call chunks
+          if (chunk.type === 'tool-input-start') {
+            // Initialize a new tool call
+            toolCallsInProgress.set(chunk.id, {
+              toolCallId: chunk.id,
+              toolName: chunk.toolName,
+              input: ''
+            });
+          }
+          if (chunk.type === 'tool-input-delta') {
+            // Accumulate tool call arguments
+            const toolCall = toolCallsInProgress.get(chunk.id);
+            if (toolCall) {
+              toolCall.input += chunk.delta;
+            }
+          }
+          if (chunk.type === 'tool-input-end') {
+            // Tool call is complete, keep it in the map for final processing
+            // Nothing specific to do here, the tool call is already complete
+          }
+          if (chunk.type === 'tool-call') {
+            // Direct tool call chunk (complete tool call)
+            toolCallsInProgress.set(chunk.toolCallId, {
+              toolCallId: chunk.toolCallId,
+              toolName: chunk.toolName,
+              input: chunk.input
+            });
+          }
           if (chunk.type === 'finish') {
             const providerMetadata = chunk.providerMetadata;
             const additionalTokenValues = {
@@ -1644,6 +1814,19 @@ const createInstrumentationMiddleware = (phClient, model, options) => {
               text: truncate(generatedText)
             });
           }
+          // Add completed tool calls to content
+          for (const toolCall of toolCallsInProgress.values()) {
+            if (toolCall.toolName) {
+              content.push({
+                type: 'tool-call',
+                id: toolCall.toolCallId,
+                function: {
+                  name: toolCall.toolName,
+                  arguments: toolCall.input
+                }
+              });
+            }
+          }
           // Structure output like mapVercelOutput does
           const output = content.length > 0 ? [{
             role: 'assistant',
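For the Vercel AI SDK middleware, the wrapped stream now also watches tool-related parts (`tool-input-start`, `tool-input-delta`, `tool-input-end`, `tool-call`) and reassembles each call's JSON input before the event is captured. A sketch of that reassembly with a hypothetical, pre-recorded list of parts, using only the fields the wrapper itself reads (`id`, `toolName`, `delta`, `toolCallId`, `input`):

```js
// Hypothetical stream parts; in the middleware they arrive through the
// wrapped stream's transform step.
const parts = [
  { type: 'tool-input-start', id: 't1', toolName: 'search' },
  { type: 'tool-input-delta', id: 't1', delta: '{"query":"posthog' },
  { type: 'tool-input-delta', id: 't1', delta: ' llm analytics"}' },
  { type: 'tool-input-end', id: 't1' },
];

const toolCallsInProgress = new Map();
for (const chunk of parts) {
  if (chunk.type === 'tool-input-start') {
    // Start a new entry keyed by the tool call id.
    toolCallsInProgress.set(chunk.id, { toolCallId: chunk.id, toolName: chunk.toolName, input: '' });
  }
  if (chunk.type === 'tool-input-delta') {
    // Append streamed argument fragments.
    const toolCall = toolCallsInProgress.get(chunk.id);
    if (toolCall) toolCall.input += chunk.delta;
  }
  if (chunk.type === 'tool-call') {
    // A complete tool call can also arrive as a single part.
    toolCallsInProgress.set(chunk.toolCallId, { toolCallId: chunk.toolCallId, toolName: chunk.toolName, input: chunk.input });
  }
}

console.log([...toolCallsInProgress.values()]);
// [{ toolCallId: 't1', toolName: 'search', input: '{"query":"posthog llm analytics"}' }]
```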
@@ -1746,6 +1929,9 @@ class WrappedMessages extends AnthropicOriginal.Messages {
     if (anthropicParams.stream) {
       return parentPromise.then(value => {
         let accumulatedContent = '';
+        const contentBlocks = [];
+        const toolsInProgress = new Map();
+        let currentTextBlock = null;
         const usage = {
           inputTokens: 0,
           outputTokens: 0,
@@ -1757,10 +1943,70 @@ class WrappedMessages extends AnthropicOriginal.Messages {
         (async () => {
           try {
             for await (const chunk of stream1) {
+              // Handle content block start events
+              if (chunk.type === 'content_block_start') {
+                if (chunk.content_block?.type === 'text') {
+                  currentTextBlock = {
+                    type: 'text',
+                    text: ''
+                  };
+                  contentBlocks.push(currentTextBlock);
+                } else if (chunk.content_block?.type === 'tool_use') {
+                  const toolBlock = {
+                    type: 'function',
+                    id: chunk.content_block.id,
+                    function: {
+                      name: chunk.content_block.name,
+                      arguments: {}
+                    }
+                  };
+                  contentBlocks.push(toolBlock);
+                  toolsInProgress.set(chunk.content_block.id, {
+                    block: toolBlock,
+                    inputString: ''
+                  });
+                  currentTextBlock = null;
+                }
+              }
+              // Handle text delta events
               if ('delta' in chunk) {
                 if ('text' in chunk.delta) {
                   const delta = chunk?.delta?.text ?? '';
                   accumulatedContent += delta;
+                  if (currentTextBlock) {
+                    currentTextBlock.text += delta;
+                  }
+                }
+              }
+              // Handle tool input delta events
+              if (chunk.type === 'content_block_delta' && chunk.delta?.type === 'input_json_delta') {
+                const block = chunk.index !== undefined ? contentBlocks[chunk.index] : undefined;
+                const toolId = block?.type === 'function' ? block.id : undefined;
+                if (toolId && toolsInProgress.has(toolId)) {
+                  const tool = toolsInProgress.get(toolId);
+                  if (tool) {
+                    tool.inputString += chunk.delta.partial_json || '';
+                  }
+                }
+              }
+              // Handle content block stop events
+              if (chunk.type === 'content_block_stop') {
+                currentTextBlock = null;
+                // Parse accumulated tool input
+                if (chunk.index !== undefined) {
+                  const block = contentBlocks[chunk.index];
+                  if (block?.type === 'function' && block.id && toolsInProgress.has(block.id)) {
+                    const tool = toolsInProgress.get(block.id);
+                    if (tool) {
+                      try {
+                        block.function.arguments = JSON.parse(tool.inputString);
+                      } catch (e) {
+                        // Keep empty object if parsing fails
+                        console.error('Error parsing tool input:', e);
+                      }
+                    }
+                    toolsInProgress.delete(block.id);
+                  }
                 }
               }
               if (chunk.type == 'message_start') {
@@ -1774,6 +2020,17 @@ class WrappedMessages extends AnthropicOriginal.Messages {
             }
             const latency = (Date.now() - startTime) / 1000;
             const availableTools = extractAvailableToolCalls('anthropic', anthropicParams);
+            // Format output to match non-streaming version
+            const formattedOutput = contentBlocks.length > 0 ? [{
+              role: 'assistant',
+              content: contentBlocks
+            }] : [{
+              role: 'assistant',
+              content: [{
+                type: 'text',
+                text: accumulatedContent
+              }]
+            }];
             await sendEventToPosthog({
               client: this.phClient,
               distinctId: posthogDistinctId,
@@ -1781,10 +2038,7 @@ class WrappedMessages extends AnthropicOriginal.Messages {
               model: anthropicParams.model,
               provider: 'anthropic',
               input: sanitizeAnthropic(mergeSystemPrompt(anthropicParams, 'anthropic')),
-              output:
-                content: accumulatedContent,
-                role: 'assistant'
-              }],
+              output: formattedOutput,
               latency,
               baseURL: this.baseURL ?? '',
               params: body,
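The Anthropic wrapper now rebuilds the streamed message from `content_block_start`, `content_block_delta`, and `content_block_stop` events, accumulating the `input_json_delta` fragments per block and parsing them as JSON once the block closes. A sketch of the `tool_use` case with a hypothetical event sequence (a real run iterates the SDK's message stream):

```js
// Hypothetical Anthropic streaming events for a single tool_use block.
const events = [
  { type: 'content_block_start', index: 0, content_block: { type: 'tool_use', id: 'toolu_1', name: 'get_weather' } },
  { type: 'content_block_delta', index: 0, delta: { type: 'input_json_delta', partial_json: '{"city":' } },
  { type: 'content_block_delta', index: 0, delta: { type: 'input_json_delta', partial_json: '"Paris"}' } },
  { type: 'content_block_stop', index: 0 },
];

const contentBlocks = [];
const toolsInProgress = new Map();
for (const chunk of events) {
  if (chunk.type === 'content_block_start' && chunk.content_block?.type === 'tool_use') {
    // Record the block immediately; its arguments stay {} until parsed.
    const block = { type: 'function', id: chunk.content_block.id, function: { name: chunk.content_block.name, arguments: {} } };
    contentBlocks.push(block);
    toolsInProgress.set(block.id, { block, inputString: '' });
  }
  if (chunk.type === 'content_block_delta' && chunk.delta?.type === 'input_json_delta') {
    // Accumulate the partial JSON string for the block at this index.
    const block = contentBlocks[chunk.index];
    const tool = block?.type === 'function' ? toolsInProgress.get(block.id) : undefined;
    if (tool) tool.inputString += chunk.delta.partial_json || '';
  }
  if (chunk.type === 'content_block_stop') {
    // Parse the accumulated JSON when the block ends.
    const block = contentBlocks[chunk.index];
    const tool = block?.type === 'function' ? toolsInProgress.get(block.id) : undefined;
    if (tool) {
      try {
        block.function.arguments = JSON.parse(tool.inputString);
      } catch (e) { /* keep {} if the JSON is incomplete */ }
      toolsInProgress.delete(block.id);
    }
  }
}

console.log(JSON.stringify(contentBlocks));
// [{"type":"function","id":"toolu_1","function":{"name":"get_weather","arguments":{"city":"Paris"}}}]
```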
@@ -1909,6 +2163,7 @@ class WrappedModels {
       const response = await this.client.models.generateContent(geminiParams);
       const latency = (Date.now() - startTime) / 1000;
       const availableTools = extractAvailableToolCalls('gemini', geminiParams);
+      const metadata = response.usageMetadata;
       await sendEventToPosthog({
         client: this.phClient,
         distinctId: posthogDistinctId,
@@ -1922,10 +2177,10 @@ class WrappedModels {
         params: params,
         httpStatus: 200,
         usage: {
-          inputTokens:
-          outputTokens:
-          reasoningTokens:
-          cacheReadInputTokens:
+          inputTokens: metadata?.promptTokenCount ?? 0,
+          outputTokens: metadata?.candidatesTokenCount ?? 0,
+          reasoningTokens: metadata?.thoughtsTokenCount ?? 0,
+          cacheReadInputTokens: metadata?.cachedContentTokenCount ?? 0
         },
         tools: availableTools,
         captureImmediate: posthogCaptureImmediate
@@ -1967,7 +2222,7 @@ class WrappedModels {
     } = params;
     const traceId = posthogTraceId ?? v4();
     const startTime = Date.now();
-
+    const accumulatedContent = [];
     let usage = {
       inputTokens: 0,
       outputTokens: 0
@@ -1975,21 +2230,66 @@ class WrappedModels {
     try {
       const stream = await this.client.models.generateContentStream(geminiParams);
       for await (const chunk of stream) {
+        // Handle text content
         if (chunk.text) {
-
+          // Find if we already have a text item to append to
+          let lastTextItem;
+          for (let i = accumulatedContent.length - 1; i >= 0; i--) {
+            if (accumulatedContent[i].type === 'text') {
+              lastTextItem = accumulatedContent[i];
+              break;
+            }
+          }
+          if (lastTextItem && lastTextItem.type === 'text') {
+            lastTextItem.text += chunk.text;
+          } else {
+            accumulatedContent.push({
+              type: 'text',
+              text: chunk.text
+            });
+          }
         }
+        // Handle function calls from candidates
+        if (chunk.candidates && Array.isArray(chunk.candidates)) {
+          for (const candidate of chunk.candidates) {
+            if (candidate.content && candidate.content.parts) {
+              for (const part of candidate.content.parts) {
+                // Type-safe check for functionCall
+                if ('functionCall' in part) {
+                  const funcCall = part.functionCall;
+                  if (funcCall?.name) {
+                    accumulatedContent.push({
+                      type: 'function',
+                      function: {
+                        name: funcCall.name,
+                        arguments: funcCall.args || {}
+                      }
+                    });
+                  }
+                }
+              }
+            }
+          }
+        }
+        // Update usage metadata - handle both old and new field names
         if (chunk.usageMetadata) {
+          const metadata = chunk.usageMetadata;
           usage = {
-            inputTokens:
-            outputTokens:
-            reasoningTokens:
-            cacheReadInputTokens:
+            inputTokens: metadata.promptTokenCount ?? 0,
+            outputTokens: metadata.candidatesTokenCount ?? 0,
+            reasoningTokens: metadata.thoughtsTokenCount ?? 0,
+            cacheReadInputTokens: metadata.cachedContentTokenCount ?? 0
           };
         }
         yield chunk;
       }
       const latency = (Date.now() - startTime) / 1000;
       const availableTools = extractAvailableToolCalls('gemini', geminiParams);
+      // Format output similar to formatResponseGemini
+      const output = accumulatedContent.length > 0 ? [{
+        role: 'assistant',
+        content: accumulatedContent
+      }] : [];
       await sendEventToPosthog({
         client: this.phClient,
         distinctId: posthogDistinctId,
@@ -1997,10 +2297,7 @@ class WrappedModels {
         model: geminiParams.model,
         provider: 'gemini',
         input: this.formatInputForPostHog(geminiParams.contents),
-        output
-          content: accumulatedContent,
-          role: 'assistant'
-        }],
+        output,
         latency,
         baseURL: 'https://generativelanguage.googleapis.com',
         params: params,
@@ -2050,22 +2347,28 @@ class WrappedModels {
         };
       }
       if (item && typeof item === 'object') {
-
+        const obj = item;
+        if ('text' in obj && obj.text) {
           return {
-            role:
-            content:
+            role: obj.role || 'user',
+            content: obj.text
           };
         }
-        if (
+        if ('content' in obj && obj.content) {
           return {
-            role:
-            content:
+            role: obj.role || 'user',
+            content: obj.content
          };
        }
-        if (
+        if ('parts' in obj && Array.isArray(obj.parts)) {
          return {
-            role:
-            content:
+            role: obj.role || 'user',
+            content: obj.parts.map(part => {
+              if (part && typeof part === 'object' && 'text' in part) {
+                return part.text;
+              }
+              return part;
+            })
          };
        }
      }
@@ -2076,16 +2379,17 @@ class WrappedModels {
       });
     }
     if (contents && typeof contents === 'object') {
-
+      const obj = contents;
+      if ('text' in obj && obj.text) {
         return [{
           role: 'user',
-          content:
+          content: obj.text
         }];
       }
-      if (
+      if ('content' in obj && obj.content) {
         return [{
           role: 'user',
-          content:
+          content: obj.content
         }];
       }
     }
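For Gemini, both the non-streaming and streaming paths now take token counts from `usageMetadata`, defaulting each field to 0 when a count is absent. The mapping in isolation, with a hypothetical `toUsage` helper and payload:

```js
// Field names are the ones the wrapper reads above; each defaults to 0.
const toUsage = metadata => ({
  inputTokens: metadata?.promptTokenCount ?? 0,
  outputTokens: metadata?.candidatesTokenCount ?? 0,
  reasoningTokens: metadata?.thoughtsTokenCount ?? 0,
  cacheReadInputTokens: metadata?.cachedContentTokenCount ?? 0,
});

console.log(toUsage({ promptTokenCount: 42, candidatesTokenCount: 7 }));
// { inputTokens: 42, outputTokens: 7, reasoningTokens: 0, cacheReadInputTokens: 0 }
console.log(toUsage(undefined));
// { inputTokens: 0, outputTokens: 0, reasoningTokens: 0, cacheReadInputTokens: 0 }
```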