@ai-sdk/xai 2.0.47 → 2.0.49
- package/CHANGELOG.md +13 -0
- package/dist/index.js +113 -48
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +114 -45
- package/dist/index.mjs.map +1 -1
- package/package.json +3 -3
package/CHANGELOG.md
CHANGED
@@ -1,5 +1,18 @@
 # @ai-sdk/xai
 
+## 2.0.49
+
+### Patch Changes
+
+- 0c419f8: fix(provider/xai): handle error responses returned with 200 status
+
+## 2.0.48
+
+### Patch Changes
+
+- c7d45b4: fixed streaming tool input for custom_tool_call types (x_search, view_x_video) which were incorrectly returning empty input values
+- 547e0c2: fix (provider/xai): no duplicate text delta in responses api
+
 ## 2.0.47
 
 ### Patch Changes
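The headline change in 2.0.49 is that an error payload returned by xAI with an HTTP 200 status now surfaces as an `APICallError` instead of a schema-validation failure. A minimal consumer-side sketch of what that looks like; the model id and prompt are illustrative, not taken from this diff:

```ts
import { generateText } from "ai";
import { xai } from "@ai-sdk/xai";
import { APICallError } from "@ai-sdk/provider";

try {
  const { text } = await generateText({
    model: xai("grok-3"), // illustrative model id
    prompt: "Hello",
  });
  console.log(text);
} catch (error) {
  // As of 2.0.49, an error body delivered with status 200 is rethrown as an
  // APICallError; isRetryable is set when xAI reports the service as unavailable.
  if (APICallError.isInstance(error)) {
    console.error("xAI call failed:", error.message, "retryable:", error.isRetryable);
  } else {
    throw error;
  }
}
```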
package/dist/index.js
CHANGED
@@ -34,10 +34,11 @@ module.exports = __toCommonJS(src_exports);
 
 // src/xai-provider.ts
 var import_openai_compatible = require("@ai-sdk/openai-compatible");
-var
+var import_provider5 = require("@ai-sdk/provider");
 var import_provider_utils11 = require("@ai-sdk/provider-utils");
 
 // src/xai-chat-language-model.ts
+var import_provider3 = require("@ai-sdk/provider");
 var import_provider_utils3 = require("@ai-sdk/provider-utils");
 var import_v43 = require("zod/v4");
 
@@ -467,14 +468,15 @@ var XaiChatLanguageModel = class {
     };
   }
   async doGenerate(options) {
-    var _a, _b, _c, _d, _e;
+    var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j;
     const { args: body, warnings } = await this.getArgs(options);
+    const url = `${(_a = this.config.baseURL) != null ? _a : "https://api.x.ai/v1"}/chat/completions`;
     const {
       responseHeaders,
       value: response,
       rawValue: rawResponse
     } = await (0, import_provider_utils3.postJsonToApi)({
-      url
+      url,
       headers: (0, import_provider_utils3.combineHeaders)(this.config.headers(), options.headers),
       body,
       failedResponseHandler: xaiFailedResponseHandler,
@@ -484,6 +486,27 @@ var XaiChatLanguageModel = class {
       abortSignal: options.abortSignal,
       fetch: this.config.fetch
     });
+    if (response.error != null) {
+      throw new import_provider3.APICallError({
+        message: response.error,
+        url,
+        requestBodyValues: body,
+        statusCode: 200,
+        responseHeaders,
+        responseBody: JSON.stringify(rawResponse),
+        isRetryable: response.code === "The service is currently unavailable"
+      });
+    }
+    if (!response.choices || response.choices.length === 0) {
+      throw new import_provider3.APICallError({
+        message: "No choices returned from the API",
+        url,
+        requestBodyValues: body,
+        statusCode: 200,
+        responseHeaders,
+        responseBody: JSON.stringify(rawResponse)
+      });
+    }
     const choice = response.choices[0];
     const content = [];
     if (choice.message.content != null && choice.message.content.length > 0) {
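In the non-streaming path above, the parsed 200 response is now inspected for top-level `error`/`code` fields before `choices` is touched. A hypothetical restatement of the two new checks as a standalone helper; the field names mirror the diff, while the helper name and example values are made up for illustration:

```ts
// Shape of the fields the new checks read from a parsed chat-completions body.
type XaiChatBody = {
  error?: string | null;
  code?: string | null;
  choices?: unknown[] | null;
};

function classifyBody(body: XaiChatBody): "error-retryable" | "error" | "no-choices" | "ok" {
  if (body.error != null) {
    // Mirrors the isRetryable comparison in the diff.
    return body.code === "The service is currently unavailable" ? "error-retryable" : "error";
  }
  if (!body.choices || body.choices.length === 0) {
    return "no-choices";
  }
  return "ok";
}

// Made-up example payloads:
classifyBody({ error: "The service is currently unavailable", code: "The service is currently unavailable" }); // "error-retryable"
classifyBody({ choices: [] }); // "no-choices"
```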
@@ -513,12 +536,12 @@ var XaiChatLanguageModel = class {
       }
     }
     if (response.citations != null) {
-      for (const
+      for (const url2 of response.citations) {
         content.push({
           type: "source",
           sourceType: "url",
           id: this.config.generateId(),
-          url
+          url: url2
         });
       }
     }
@@ -526,11 +549,11 @@ var XaiChatLanguageModel = class {
       content,
       finishReason: mapXaiFinishReason(choice.finish_reason),
       usage: {
-        inputTokens: response.usage.prompt_tokens,
-        outputTokens: response.usage.completion_tokens,
-        totalTokens: response.usage.total_tokens,
-        reasoningTokens: (
-        cachedInputTokens: (
+        inputTokens: (_b = response.usage) == null ? void 0 : _b.prompt_tokens,
+        outputTokens: (_c = response.usage) == null ? void 0 : _c.completion_tokens,
+        totalTokens: (_d = response.usage) == null ? void 0 : _d.total_tokens,
+        reasoningTokens: (_g = (_f = (_e = response.usage) == null ? void 0 : _e.completion_tokens_details) == null ? void 0 : _f.reasoning_tokens) != null ? _g : void 0,
+        cachedInputTokens: (_j = (_i = (_h = response.usage) == null ? void 0 : _h.prompt_tokens_details) == null ? void 0 : _i.cached_tokens) != null ? _j : void 0
       },
       request: { body },
       response: {
@@ -551,12 +574,47 @@ var XaiChatLanguageModel = class {
         include_usage: true
       }
     };
+    const url = `${(_a = this.config.baseURL) != null ? _a : "https://api.x.ai/v1"}/chat/completions`;
     const { responseHeaders, value: response } = await (0, import_provider_utils3.postJsonToApi)({
-      url
+      url,
       headers: (0, import_provider_utils3.combineHeaders)(this.config.headers(), options.headers),
       body,
       failedResponseHandler: xaiFailedResponseHandler,
-      successfulResponseHandler: (
+      successfulResponseHandler: async ({ response: response2 }) => {
+        const responseHeaders2 = (0, import_provider_utils3.extractResponseHeaders)(response2);
+        const contentType = response2.headers.get("content-type");
+        if (contentType == null ? void 0 : contentType.includes("application/json")) {
+          const responseBody = await response2.text();
+          const parsedError = await (0, import_provider_utils3.safeParseJSON)({
+            text: responseBody,
+            schema: xaiStreamErrorSchema
+          });
+          if (parsedError.success) {
+            throw new import_provider3.APICallError({
+              message: parsedError.value.error,
+              url,
+              requestBodyValues: body,
+              statusCode: 200,
+              responseHeaders: responseHeaders2,
+              responseBody,
+              isRetryable: parsedError.value.code === "The service is currently unavailable"
+            });
+          }
+          throw new import_provider3.APICallError({
+            message: "Invalid JSON response",
+            url,
+            requestBodyValues: body,
+            statusCode: 200,
+            responseHeaders: responseHeaders2,
+            responseBody
+          });
+        }
+        return (0, import_provider_utils3.createEventSourceResponseHandler)(xaiChatChunkSchema)({
+          response: response2,
+          url,
+          requestBodyValues: body
+        });
+      },
       abortSignal: options.abortSignal,
       fetch: this.config.fetch
     });
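The custom `successfulResponseHandler` above branches on content type: a JSON body on the streaming endpoint means xAI reported an error despite the 200 status, and anything else is handed to the regular SSE chunk handler. A standalone sketch of that dispatch using a zod schema shaped like the new `xaiStreamErrorSchema`; the function name and error messages are hypothetical, and the provider's real handler runs inside `postJsonToApi` rather than on a raw `fetch` Response:

```ts
import { z } from "zod/v4";

// Schema mirroring the { code, error } shape the provider now checks for.
const streamErrorSchema = z.object({ code: z.string(), error: z.string() });

// Hypothetical pre-check on a fetch Response before wiring up SSE parsing.
async function assertNotInlineError(response: Response): Promise<Response> {
  const contentType = response.headers.get("content-type") ?? "";
  if (!contentType.includes("application/json")) {
    return response; // looks like a genuine event stream
  }
  const text = await response.text();
  let json: unknown;
  try {
    json = JSON.parse(text);
  } catch {
    throw new Error(`Unexpected non-JSON body on the streaming endpoint: ${text}`);
  }
  const parsed = streamErrorSchema.safeParse(json);
  if (parsed.success) {
    throw new Error(`xAI returned an error with status 200: ${parsed.data.error}`);
  }
  throw new Error(`Unexpected JSON body on the streaming endpoint: ${text}`);
}
```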
@@ -596,12 +654,12 @@ var XaiChatLanguageModel = class {
            isFirstChunk = false;
          }
          if (value.citations != null) {
-            for (const
+            for (const url2 of value.citations) {
              controller.enqueue({
                type: "source",
                sourceType: "url",
                id: self.config.generateId(),
-                url
+                url: url2
              });
            }
          }
@@ -743,10 +801,12 @@ var xaiChatResponseSchema = import_v43.z.object({
       index: import_v43.z.number(),
       finish_reason: import_v43.z.string().nullish()
     })
-  ),
-  object: import_v43.z.literal("chat.completion"),
-  usage: xaiUsageSchema,
-  citations: import_v43.z.array(import_v43.z.string().url()).nullish()
+  ).nullish(),
+  object: import_v43.z.literal("chat.completion").nullish(),
+  usage: xaiUsageSchema.nullish(),
+  citations: import_v43.z.array(import_v43.z.string().url()).nullish(),
+  code: import_v43.z.string().nullish(),
+  error: import_v43.z.string().nullish()
 });
 var xaiChatChunkSchema = import_v43.z.object({
   id: import_v43.z.string().nullish(),
@@ -776,6 +836,10 @@ var xaiChatChunkSchema = import_v43.z.object({
   usage: xaiUsageSchema.nullish(),
   citations: import_v43.z.array(import_v43.z.string().url()).nullish()
 });
+var xaiStreamErrorSchema = import_v43.z.object({
+  code: import_v43.z.string(),
+  error: import_v43.z.string()
+});
 
 // src/responses/xai-responses-language-model.ts
 var import_provider_utils7 = require("@ai-sdk/provider-utils");
@@ -1246,7 +1310,7 @@ async function convertToXaiResponsesInput({
 }
 
 // src/responses/xai-responses-prepare-tools.ts
-var
+var import_provider4 = require("@ai-sdk/provider");
 var import_provider_utils6 = require("@ai-sdk/provider-utils");
 
 // src/tool/web-search.ts
@@ -1486,7 +1550,7 @@ async function prepareResponsesTools({
     }
     default: {
       const _exhaustiveCheck = type;
-      throw new
+      throw new import_provider4.UnsupportedFunctionalityError({
         functionality: `tool choice type: ${_exhaustiveCheck}`
       });
     }
@@ -1844,28 +1908,29 @@ var XaiResponsesLanguageModel = class {
          if (event.type === "response.output_item.added" || event.type === "response.output_item.done") {
            const part = event.item;
            if (part.type === "web_search_call" || part.type === "x_search_call" || part.type === "code_interpreter_call" || part.type === "code_execution_call" || part.type === "view_image_call" || part.type === "view_x_video_call" || part.type === "custom_tool_call") {
-
+              const webSearchSubTools = [
+                "web_search",
+                "web_search_with_snippets",
+                "browse_page"
+              ];
+              const xSearchSubTools = [
+                "x_user_search",
+                "x_keyword_search",
+                "x_semantic_search",
+                "x_thread_fetch"
+              ];
+              let toolName = (_e = part.name) != null ? _e : "";
+              if (webSearchSubTools.includes((_f = part.name) != null ? _f : "") || part.type === "web_search_call") {
+                toolName = webSearchToolName != null ? webSearchToolName : "web_search";
+              } else if (xSearchSubTools.includes((_g = part.name) != null ? _g : "") || part.type === "x_search_call") {
+                toolName = xSearchToolName != null ? xSearchToolName : "x_search";
+              } else if (part.name === "code_execution" || part.type === "code_interpreter_call" || part.type === "code_execution_call") {
+                toolName = codeExecutionToolName != null ? codeExecutionToolName : "code_execution";
+              }
+              const toolInput = part.type === "custom_tool_call" ? (_h = part.input) != null ? _h : "" : (_i = part.arguments) != null ? _i : "";
+              const shouldEmit = part.type === "custom_tool_call" ? event.type === "response.output_item.done" : !seenToolCalls.has(part.id);
+              if (shouldEmit && !seenToolCalls.has(part.id)) {
                seenToolCalls.add(part.id);
-                const webSearchSubTools = [
-                  "web_search",
-                  "web_search_with_snippets",
-                  "browse_page"
-                ];
-                const xSearchSubTools = [
-                  "x_user_search",
-                  "x_keyword_search",
-                  "x_semantic_search",
-                  "x_thread_fetch"
-                ];
-                let toolName = (_e = part.name) != null ? _e : "";
-                if (webSearchSubTools.includes((_f = part.name) != null ? _f : "") || part.type === "web_search_call") {
-                  toolName = webSearchToolName != null ? webSearchToolName : "web_search";
-                } else if (xSearchSubTools.includes((_g = part.name) != null ? _g : "") || part.type === "x_search_call") {
-                  toolName = xSearchToolName != null ? xSearchToolName : "x_search";
-                } else if (part.name === "code_execution" || part.type === "code_interpreter_call" || part.type === "code_execution_call") {
-                  toolName = codeExecutionToolName != null ? codeExecutionToolName : "code_execution";
-                }
-                const toolInput = part.type === "custom_tool_call" ? (_h = part.input) != null ? _h : "" : (_i = part.arguments) != null ? _i : "";
                controller.enqueue({
                  type: "tool-input-start",
                  id: part.id,
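This hunk is the fix for the empty tool inputs noted in the 2.0.48 changelog entry: `custom_tool_call` items (x_search, view_x_video) only carry their `input` on the `response.output_item.done` event, so emission is now deferred until then, while other call types still emit when first seen. A hypothetical restatement of the emission rule as a standalone predicate; the function name is illustrative:

```ts
type OutputItemEvent = "response.output_item.added" | "response.output_item.done";

// Emit each tool call once; for custom_tool_call items wait for "done",
// since the input field is still empty on "added".
function shouldEmitToolCall(
  partType: string,
  eventType: OutputItemEvent,
  alreadySeen: boolean,
): boolean {
  if (alreadySeen) return false;
  return partType === "custom_tool_call"
    ? eventType === "response.output_item.done"
    : true;
}

shouldEmitToolCall("custom_tool_call", "response.output_item.added", false); // false
shouldEmitToolCall("custom_tool_call", "response.output_item.done", false); // true
shouldEmitToolCall("x_search_call", "response.output_item.added", false); // true
```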
@@ -1900,12 +1965,12 @@ var XaiResponsesLanguageModel = class {
                 type: "text-start",
                 id: blockId
               });
+              controller.enqueue({
+                type: "text-delta",
+                id: blockId,
+                delta: contentPart.text
+              });
             }
-            controller.enqueue({
-              type: "text-delta",
-              id: blockId,
-              delta: contentPart.text
-            });
           }
           if (contentPart.annotations) {
             for (const annotation of contentPart.annotations) {
@@ -2023,7 +2088,7 @@ var xaiTools = {
 };
 
 // src/version.ts
-var VERSION = true ? "2.0.
+var VERSION = true ? "2.0.49" : "0.0.0-test";
 
 // src/xai-provider.ts
 var xaiErrorStructure = {
@@ -2079,7 +2144,7 @@ function createXai(options = {}) {
   provider.chat = createChatLanguageModel;
   provider.responses = createResponsesLanguageModel;
   provider.textEmbeddingModel = (modelId) => {
-    throw new
+    throw new import_provider5.NoSuchModelError({ modelId, modelType: "textEmbeddingModel" });
   };
   provider.imageModel = createImageModel;
   provider.image = createImageModel;