@ai-sdk/xai 3.0.54 → 3.0.56
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +14 -0
- package/dist/index.js +7 -5
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +7 -5
- package/dist/index.mjs.map +1 -1
- package/package.json +4 -4
- package/src/convert-xai-chat-usage.ts +8 -2
- package/src/responses/convert-xai-responses-usage.ts +8 -2
package/CHANGELOG.md
CHANGED
|
@@ -1,5 +1,19 @@
|
|
|
1
1
|
# @ai-sdk/xai
|
|
2
2
|
|
|
3
|
+
## 3.0.56
|
|
4
|
+
|
|
5
|
+
### Patch Changes
|
|
6
|
+
|
|
7
|
+
- 7ccb902: fix(provider/xai): handle inconsistent cached token reporting
|
|
8
|
+
|
|
9
|
+
## 3.0.55
|
|
10
|
+
|
|
11
|
+
### Patch Changes
|
|
12
|
+
|
|
13
|
+
- Updated dependencies [4024a3a]
|
|
14
|
+
- @ai-sdk/provider-utils@4.0.15
|
|
15
|
+
- @ai-sdk/openai-compatible@2.0.30
|
|
16
|
+
|
|
3
17
|
## 3.0.54
|
|
4
18
|
|
|
5
19
|
### Patch Changes
|
package/dist/index.js
CHANGED
|
@@ -159,10 +159,11 @@ function convertXaiChatUsage(usage) {
|
|
|
159
159
|
var _a, _b, _c, _d;
|
|
160
160
|
const cacheReadTokens = (_b = (_a = usage.prompt_tokens_details) == null ? void 0 : _a.cached_tokens) != null ? _b : 0;
|
|
161
161
|
const reasoningTokens = (_d = (_c = usage.completion_tokens_details) == null ? void 0 : _c.reasoning_tokens) != null ? _d : 0;
|
|
162
|
+
const promptTokensIncludesCached = cacheReadTokens <= usage.prompt_tokens;
|
|
162
163
|
return {
|
|
163
164
|
inputTokens: {
|
|
164
|
-
total: usage.prompt_tokens,
|
|
165
|
-
noCache: usage.prompt_tokens - cacheReadTokens,
|
|
165
|
+
total: promptTokensIncludesCached ? usage.prompt_tokens : usage.prompt_tokens + cacheReadTokens,
|
|
166
|
+
noCache: promptTokensIncludesCached ? usage.prompt_tokens - cacheReadTokens : usage.prompt_tokens,
|
|
166
167
|
cacheRead: cacheReadTokens,
|
|
167
168
|
cacheWrite: void 0
|
|
168
169
|
},
|
|
@@ -1186,10 +1187,11 @@ function convertXaiResponsesUsage(usage) {
|
|
|
1186
1187
|
var _a, _b, _c, _d;
|
|
1187
1188
|
const cacheReadTokens = (_b = (_a = usage.input_tokens_details) == null ? void 0 : _a.cached_tokens) != null ? _b : 0;
|
|
1188
1189
|
const reasoningTokens = (_d = (_c = usage.output_tokens_details) == null ? void 0 : _c.reasoning_tokens) != null ? _d : 0;
|
|
1190
|
+
const inputTokensIncludesCached = cacheReadTokens <= usage.input_tokens;
|
|
1189
1191
|
return {
|
|
1190
1192
|
inputTokens: {
|
|
1191
|
-
total: usage.input_tokens,
|
|
1192
|
-
noCache: usage.input_tokens - cacheReadTokens,
|
|
1193
|
+
total: inputTokensIncludesCached ? usage.input_tokens : usage.input_tokens + cacheReadTokens,
|
|
1194
|
+
noCache: inputTokensIncludesCached ? usage.input_tokens - cacheReadTokens : usage.input_tokens,
|
|
1193
1195
|
cacheRead: cacheReadTokens,
|
|
1194
1196
|
cacheWrite: void 0
|
|
1195
1197
|
},
|
|
@@ -2690,7 +2692,7 @@ var xaiTools = {
|
|
|
2690
2692
|
};
|
|
2691
2693
|
|
|
2692
2694
|
// src/version.ts
|
|
2693
|
-
var VERSION = true ? "3.0.54" : "0.0.0-test";
|
|
2695
|
+
var VERSION = true ? "3.0.56" : "0.0.0-test";
|
|
2694
2696
|
|
|
2695
2697
|
// src/xai-provider.ts
|
|
2696
2698
|
function createXai(options = {}) {
|