@ai-sdk/xai 3.0.0-beta.52 → 3.0.0-beta.53
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +10 -0
- package/dist/index.js +219 -206
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +219 -206
- package/dist/index.mjs.map +1 -1
- package/package.json +4 -4
package/dist/index.mjs
CHANGED
@@ -320,6 +320,27 @@ function prepareTools({
   }
 }
 
+// src/convert-xai-chat-usage.ts
+function convertXaiChatUsage(usage) {
+  var _a, _b, _c, _d;
+  const cacheReadTokens = (_b = (_a = usage.prompt_tokens_details) == null ? void 0 : _a.cached_tokens) != null ? _b : 0;
+  const reasoningTokens = (_d = (_c = usage.completion_tokens_details) == null ? void 0 : _c.reasoning_tokens) != null ? _d : 0;
+  return {
+    inputTokens: {
+      total: usage.prompt_tokens,
+      noCache: usage.prompt_tokens - cacheReadTokens,
+      cacheRead: cacheReadTokens,
+      cacheWrite: void 0
+    },
+    outputTokens: {
+      total: usage.completion_tokens,
+      text: usage.completion_tokens - reasoningTokens,
+      reasoning: reasoningTokens
+    },
+    raw: usage
+  };
+}
+
 // src/xai-chat-language-model.ts
 var XaiChatLanguageModel = class {
   constructor(modelId, config) {
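For orientation, a minimal sketch (not part of the published diff) of what the new convertXaiChatUsage helper produces; the raw payload and token counts below are invented for illustration:

// Example (illustrative only)
const rawUsage = {
  prompt_tokens: 120,
  completion_tokens: 48,
  total_tokens: 168,
  prompt_tokens_details: { cached_tokens: 80 },
  completion_tokens_details: { reasoning_tokens: 16 }
};
const usage = convertXaiChatUsage(rawUsage);
// usage.inputTokens  -> { total: 120, noCache: 40, cacheRead: 80, cacheWrite: undefined }
// usage.outputTokens -> { total: 48, text: 32, reasoning: 16 }
// usage.raw          -> rawUsage, passed through unchanged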
@@ -444,7 +465,7 @@ var XaiChatLanguageModel = class {
     };
   }
   async doGenerate(options) {
-    var _a, _b, _c, _d, _e;
+    var _a;
     const { args: body, warnings } = await this.getArgs(options);
     const {
       responseHeaders,
@@ -502,13 +523,7 @@ var XaiChatLanguageModel = class {
     return {
       content,
       finishReason: mapXaiFinishReason(choice.finish_reason),
-      usage: {
-        inputTokens: response.usage.prompt_tokens,
-        outputTokens: response.usage.completion_tokens,
-        totalTokens: response.usage.total_tokens,
-        reasoningTokens: (_c = (_b = response.usage.completion_tokens_details) == null ? void 0 : _b.reasoning_tokens) != null ? _c : void 0,
-        cachedInputTokens: (_e = (_d = response.usage.prompt_tokens_details) == null ? void 0 : _d.cached_tokens) != null ? _e : void 0
-      },
+      usage: convertXaiChatUsage(response.usage),
       request: { body },
       response: {
         ...getResponseMetadata(response),
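The practical effect of this hunk is a change in the shape of the usage object that doGenerate returns: the flat record removed above is replaced by the nested structure built by convertXaiChatUsage. A rough before/after sketch with invented values:

// Shape comparison (illustrative only)
const usageInBeta52 = {
  inputTokens: 120,
  outputTokens: 48,
  totalTokens: 168,
  reasoningTokens: 16,
  cachedInputTokens: 80
};
const usageInBeta53 = {
  inputTokens: { total: 120, noCache: 40, cacheRead: 80, cacheWrite: undefined },
  outputTokens: { total: 48, text: 32, reasoning: 16 },
  raw: { /* provider usage payload as received */ }
};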
@@ -538,13 +553,7 @@ var XaiChatLanguageModel = class {
       fetch: this.config.fetch
     });
     let finishReason = "unknown";
-    let usage = {
-      inputTokens: void 0,
-      outputTokens: void 0,
-      totalTokens: void 0,
-      reasoningTokens: void 0,
-      cachedInputTokens: void 0
-    };
+    let usage = void 0;
     let isFirstChunk = true;
     const contentBlocks = {};
     const lastReasoningDeltas = {};
@@ -556,7 +565,6 @@ var XaiChatLanguageModel = class {
         controller.enqueue({ type: "stream-start", warnings });
       },
       transform(chunk, controller) {
-        var _a2, _b, _c, _d;
         if (options.includeRawChunks) {
           controller.enqueue({ type: "raw", rawValue: chunk.rawValue });
         }
@@ -583,11 +591,7 @@ var XaiChatLanguageModel = class {
           }
         }
        if (value.usage != null) {
-          usage.inputTokens = value.usage.prompt_tokens;
-          usage.outputTokens = value.usage.completion_tokens;
-          usage.totalTokens = value.usage.total_tokens;
-          usage.reasoningTokens = (_b = (_a2 = value.usage.completion_tokens_details) == null ? void 0 : _a2.reasoning_tokens) != null ? _b : void 0;
-          usage.cachedInputTokens = (_d = (_c = value.usage.prompt_tokens_details) == null ? void 0 : _c.cached_tokens) != null ? _d : void 0;
+          usage = convertXaiChatUsage(value.usage);
        }
        const choice = value.choices[0];
        if ((choice == null ? void 0 : choice.finish_reason) != null) {
@@ -763,6 +767,189 @@ import {
   postJsonToApi as postJsonToApi2
 } from "@ai-sdk/provider-utils";
 
+// src/responses/convert-to-xai-responses-input.ts
+async function convertToXaiResponsesInput({
+  prompt
+}) {
+  var _a, _b, _c, _d, _e;
+  const input = [];
+  const inputWarnings = [];
+  for (const message of prompt) {
+    switch (message.role) {
+      case "system": {
+        input.push({
+          role: "system",
+          content: message.content
+        });
+        break;
+      }
+      case "user": {
+        let userContent = "";
+        for (const block of message.content) {
+          switch (block.type) {
+            case "text": {
+              userContent += block.text;
+              break;
+            }
+            case "file": {
+              inputWarnings.push({
+                type: "other",
+                message: `xAI Responses API does not support ${block.type} in user messages`
+              });
+              break;
+            }
+            default: {
+              const _exhaustiveCheck = block;
+              inputWarnings.push({
+                type: "other",
+                message: "xAI Responses API does not support this content type in user messages"
+              });
+            }
+          }
+        }
+        input.push({
+          role: "user",
+          content: userContent
+        });
+        break;
+      }
+      case "assistant": {
+        for (const part of message.content) {
+          switch (part.type) {
+            case "text": {
+              const id = typeof ((_b = (_a = part.providerOptions) == null ? void 0 : _a.xai) == null ? void 0 : _b.itemId) === "string" ? part.providerOptions.xai.itemId : void 0;
+              input.push({
+                role: "assistant",
+                content: part.text,
+                id
+              });
+              break;
+            }
+            case "tool-call": {
+              if (part.providerExecuted) {
+                break;
+              }
+              const id = typeof ((_d = (_c = part.providerOptions) == null ? void 0 : _c.xai) == null ? void 0 : _d.itemId) === "string" ? part.providerOptions.xai.itemId : void 0;
+              input.push({
+                type: "function_call",
+                id: id != null ? id : part.toolCallId,
+                call_id: part.toolCallId,
+                name: part.toolName,
+                arguments: JSON.stringify(part.input),
+                status: "completed"
+              });
+              break;
+            }
+            case "tool-result": {
+              break;
+            }
+            case "reasoning":
+            case "file": {
+              inputWarnings.push({
+                type: "other",
+                message: `xAI Responses API does not support ${part.type} in assistant messages`
+              });
+              break;
+            }
+            default: {
+              const _exhaustiveCheck = part;
+              inputWarnings.push({
+                type: "other",
+                message: "xAI Responses API does not support this content type in assistant messages"
+              });
+            }
+          }
+        }
+        break;
+      }
+      case "tool": {
+        for (const part of message.content) {
+          const output = part.output;
+          let outputValue;
+          switch (output.type) {
+            case "text":
+            case "error-text":
+              outputValue = output.value;
+              break;
+            case "execution-denied":
+              outputValue = (_e = output.reason) != null ? _e : "tool execution denied";
+              break;
+            case "json":
+            case "error-json":
+              outputValue = JSON.stringify(output.value);
+              break;
+            case "content":
+              outputValue = output.value.map((item) => {
+                if (item.type === "text") {
+                  return item.text;
+                }
+                return "";
+              }).join("");
+              break;
+            default: {
+              const _exhaustiveCheck = output;
+              outputValue = "";
+            }
+          }
+          input.push({
+            type: "function_call_output",
+            call_id: part.toolCallId,
+            output: outputValue
+          });
+        }
+        break;
+      }
+      default: {
+        const _exhaustiveCheck = message;
+        inputWarnings.push({
+          type: "other",
+          message: "unsupported message role"
+        });
+      }
+    }
+  }
+  return { input, inputWarnings };
+}
+
+// src/responses/convert-xai-responses-usage.ts
+function convertXaiResponsesUsage(usage) {
+  var _a, _b, _c, _d;
+  const cacheReadTokens = (_b = (_a = usage.input_tokens_details) == null ? void 0 : _a.cached_tokens) != null ? _b : 0;
+  const reasoningTokens = (_d = (_c = usage.output_tokens_details) == null ? void 0 : _c.reasoning_tokens) != null ? _d : 0;
+  return {
+    inputTokens: {
+      total: usage.input_tokens,
+      noCache: usage.input_tokens - cacheReadTokens,
+      cacheRead: cacheReadTokens,
+      cacheWrite: void 0
+    },
+    outputTokens: {
+      total: usage.output_tokens,
+      text: usage.output_tokens - reasoningTokens,
+      reasoning: reasoningTokens
+    },
+    raw: usage
+  };
+}
+
+// src/responses/map-xai-responses-finish-reason.ts
+function mapXaiResponsesFinishReason(finishReason) {
+  switch (finishReason) {
+    case "stop":
+    case "completed":
+      return "stop";
+    case "length":
+      return "length";
+    case "tool_calls":
+    case "function_call":
+      return "tool-calls";
+    case "content_filter":
+      return "content-filter";
+    default:
+      return "unknown";
+  }
+}
+
 // src/responses/xai-responses-api.ts
 import { z as z4 } from "zod/v4";
 var annotationSchema = z4.union([
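The block above moves the Responses input conversion, usage conversion, and finish-reason mapping ahead of the schema definitions (the old copies are removed in later hunks). As a rough, hedged sketch of how convertToXaiResponsesInput behaves, assuming prompt messages shaped the way the helper reads them (message.role, part.type, part.input, part.output); the conversation content is invented:

// Example (illustrative only; not part of the diff)
const { input, inputWarnings } = await convertToXaiResponsesInput({
  prompt: [
    { role: "system", content: "You are a helpful assistant." },
    { role: "user", content: [{ type: "text", text: "Weather in Paris?" }] },
    {
      role: "assistant",
      content: [
        { type: "tool-call", toolCallId: "call_1", toolName: "get_weather", input: { city: "Paris" } }
      ]
    },
    {
      role: "tool",
      content: [
        { type: "tool-result", toolCallId: "call_1", toolName: "get_weather", output: { type: "json", value: { tempC: 18 } } }
      ]
    }
  ]
});
// input[0] -> { role: "system", content: "You are a helpful assistant." }
// input[1] -> { role: "user", content: "Weather in Paris?" }
// input[2] -> { type: "function_call", id: "call_1", call_id: "call_1", name: "get_weather", arguments: '{"city":"Paris"}', status: "completed" }
// input[3] -> { type: "function_call_output", call_id: "call_1", output: '{"tempC":18}' }
// inputWarnings -> []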
@@ -1022,24 +1209,6 @@ var xaiResponsesChunkSchema = z4.union([
   })
 ]);
 
-// src/responses/map-xai-responses-finish-reason.ts
-function mapXaiResponsesFinishReason(finishReason) {
-  switch (finishReason) {
-    case "stop":
-    case "completed":
-      return "stop";
-    case "length":
-      return "length";
-    case "tool_calls":
-    case "function_call":
-      return "tool-calls";
-    case "content_filter":
-      return "content-filter";
-    default:
-      return "unknown";
-  }
-}
-
 // src/responses/xai-responses-options.ts
 import { z as z5 } from "zod/v4";
 var xaiResponsesProviderOptions = z5.object({
@@ -1059,150 +1228,6 @@ var xaiResponsesProviderOptions = z5.object({
   previousResponseId: z5.string().optional()
 });
 
-// src/responses/convert-to-xai-responses-input.ts
-async function convertToXaiResponsesInput({
-  prompt
-}) {
-  var _a, _b, _c, _d, _e;
-  const input = [];
-  const inputWarnings = [];
-  for (const message of prompt) {
-    switch (message.role) {
-      case "system": {
-        input.push({
-          role: "system",
-          content: message.content
-        });
-        break;
-      }
-      case "user": {
-        let userContent = "";
-        for (const block of message.content) {
-          switch (block.type) {
-            case "text": {
-              userContent += block.text;
-              break;
-            }
-            case "file": {
-              inputWarnings.push({
-                type: "other",
-                message: `xAI Responses API does not support ${block.type} in user messages`
-              });
-              break;
-            }
-            default: {
-              const _exhaustiveCheck = block;
-              inputWarnings.push({
-                type: "other",
-                message: "xAI Responses API does not support this content type in user messages"
-              });
-            }
-          }
-        }
-        input.push({
-          role: "user",
-          content: userContent
-        });
-        break;
-      }
-      case "assistant": {
-        for (const part of message.content) {
-          switch (part.type) {
-            case "text": {
-              const id = typeof ((_b = (_a = part.providerOptions) == null ? void 0 : _a.xai) == null ? void 0 : _b.itemId) === "string" ? part.providerOptions.xai.itemId : void 0;
-              input.push({
-                role: "assistant",
-                content: part.text,
-                id
-              });
-              break;
-            }
-            case "tool-call": {
-              if (part.providerExecuted) {
-                break;
-              }
-              const id = typeof ((_d = (_c = part.providerOptions) == null ? void 0 : _c.xai) == null ? void 0 : _d.itemId) === "string" ? part.providerOptions.xai.itemId : void 0;
-              input.push({
-                type: "function_call",
-                id: id != null ? id : part.toolCallId,
-                call_id: part.toolCallId,
-                name: part.toolName,
-                arguments: JSON.stringify(part.input),
-                status: "completed"
-              });
-              break;
-            }
-            case "tool-result": {
-              break;
-            }
-            case "reasoning":
-            case "file": {
-              inputWarnings.push({
-                type: "other",
-                message: `xAI Responses API does not support ${part.type} in assistant messages`
-              });
-              break;
-            }
-            default: {
-              const _exhaustiveCheck = part;
-              inputWarnings.push({
-                type: "other",
-                message: "xAI Responses API does not support this content type in assistant messages"
-              });
-            }
-          }
-        }
-        break;
-      }
-      case "tool": {
-        for (const part of message.content) {
-          const output = part.output;
-          let outputValue;
-          switch (output.type) {
-            case "text":
-            case "error-text":
-              outputValue = output.value;
-              break;
-            case "execution-denied":
-              outputValue = (_e = output.reason) != null ? _e : "tool execution denied";
-              break;
-            case "json":
-            case "error-json":
-              outputValue = JSON.stringify(output.value);
-              break;
-            case "content":
-              outputValue = output.value.map((item) => {
-                if (item.type === "text") {
-                  return item.text;
-                }
-                return "";
-              }).join("");
-              break;
-            default: {
-              const _exhaustiveCheck = output;
-              outputValue = "";
-            }
-          }
-          input.push({
-            type: "function_call_output",
-            call_id: part.toolCallId,
-            output: outputValue
-          });
-        }
-        break;
-      }
-      default: {
-        const _exhaustiveCheck = message;
-        inputWarnings.push({
-          type: "other",
-          message: "unsupported message role"
-        });
-      }
-    }
-  }
-  return { input, inputWarnings };
-}
-
 // src/responses/xai-responses-prepare-tools.ts
 import {
   UnsupportedFunctionalityError as UnsupportedFunctionalityError3
@@ -1557,7 +1582,7 @@ var XaiResponsesLanguageModel = class {
     };
   }
   async doGenerate(options) {
-    var _a, _b, _c, _d, _e, _f, _g, _h;
+    var _a, _b, _c, _d, _e, _f, _g;
     const {
       args: body,
       warnings,
@@ -1654,12 +1679,7 @@ var XaiResponsesLanguageModel = class {
     return {
       content,
      finishReason: mapXaiResponsesFinishReason(response.status),
-      usage: {
-        inputTokens: response.usage.input_tokens,
-        outputTokens: response.usage.output_tokens,
-        totalTokens: response.usage.total_tokens,
-        reasoningTokens: (_h = response.usage.output_tokens_details) == null ? void 0 : _h.reasoning_tokens
-      },
+      usage: convertXaiResponsesUsage(response.usage),
      request: { body },
      response: {
        ...getResponseMetadata(response),
@@ -1694,11 +1714,7 @@ var XaiResponsesLanguageModel = class {
       fetch: this.config.fetch
     });
     let finishReason = "unknown";
-    let usage = {
-      inputTokens: void 0,
-      outputTokens: void 0,
-      totalTokens: void 0
-    };
+    let usage = void 0;
     let isFirstChunk = true;
     const contentBlocks = {};
     const seenToolCalls = /* @__PURE__ */ new Set();
@@ -1710,7 +1726,7 @@ var XaiResponsesLanguageModel = class {
         controller.enqueue({ type: "stream-start", warnings });
       },
       transform(chunk, controller) {
-        var _a2, _b, _c, _d, _e, _f, _g, _h
+        var _a2, _b, _c, _d, _e, _f, _g, _h;
         if (options.includeRawChunks) {
           controller.enqueue({ type: "raw", rawValue: chunk.rawValue });
         }
@@ -1800,10 +1816,7 @@ var XaiResponsesLanguageModel = class {
         if (event.type === "response.done" || event.type === "response.completed") {
           const response2 = event.response;
           if (response2.usage) {
-            usage.inputTokens = response2.usage.input_tokens;
-            usage.outputTokens = response2.usage.output_tokens;
-            usage.totalTokens = response2.usage.total_tokens;
-            usage.reasoningTokens = (_c = response2.usage.output_tokens_details) == null ? void 0 : _c.reasoning_tokens;
+            usage = convertXaiResponsesUsage(response2.usage);
           }
           if (response2.status) {
             finishReason = mapXaiResponsesFinishReason(response2.status);
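Mirroring the chat-model change earlier in this diff, the Responses streaming path now builds usage in one step via convertXaiResponsesUsage, which reads the Responses-style input_tokens/output_tokens fields. A small illustrative sketch with invented numbers:

// Example (illustrative only)
const responsesUsage = convertXaiResponsesUsage({
  input_tokens: 200,
  output_tokens: 90,
  total_tokens: 290,
  input_tokens_details: { cached_tokens: 150 },
  output_tokens_details: { reasoning_tokens: 30 }
});
// responsesUsage.inputTokens  -> { total: 200, noCache: 50, cacheRead: 150, cacheWrite: undefined }
// responsesUsage.outputTokens -> { total: 90, text: 60, reasoning: 30 }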
@@ -1826,15 +1839,15 @@ var XaiResponsesLanguageModel = class {
             "x_semantic_search",
             "x_thread_fetch"
           ];
-          let toolName = (
-          if (webSearchSubTools.includes((
+          let toolName = (_c = part.name) != null ? _c : "";
+          if (webSearchSubTools.includes((_d = part.name) != null ? _d : "")) {
            toolName = webSearchToolName != null ? webSearchToolName : "web_search";
-          } else if (xSearchSubTools.includes((
+          } else if (xSearchSubTools.includes((_e = part.name) != null ? _e : "")) {
            toolName = xSearchToolName != null ? xSearchToolName : "x_search";
          } else if (part.name === "code_execution") {
            toolName = codeExecutionToolName != null ? codeExecutionToolName : "code_execution";
          }
-          const toolInput = part.type === "custom_tool_call" ? (
+          const toolInput = part.type === "custom_tool_call" ? (_f = part.input) != null ? _f : "" : (_g = part.arguments) != null ? _g : "";
          controller.enqueue({
            type: "tool-input-start",
            id: part.id,
@@ -1884,7 +1897,7 @@ var XaiResponsesLanguageModel = class {
             sourceType: "url",
             id: self.config.generateId(),
             url: annotation.url,
-            title: (
+            title: (_h = annotation.title) != null ? _h : annotation.url
           });
         }
       }
@@ -1989,7 +2002,7 @@ var xaiTools = {
 };
 
 // src/version.ts
-var VERSION = true ? "3.0.0-beta.52" : "0.0.0-test";
+var VERSION = true ? "3.0.0-beta.53" : "0.0.0-test";
 
 // src/xai-provider.ts
 var xaiErrorStructure = {