@ai-sdk/xai 3.0.0-beta.65 → 3.0.0-beta.66
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +9 -0
- package/dist/index.js +22 -22
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +22 -22
- package/dist/index.mjs.map +1 -1
- package/package.json +4 -4
package/CHANGELOG.md
CHANGED
@@ -1,5 +1,14 @@
 # @ai-sdk/xai
 
+## 3.0.0-beta.66
+
+### Patch Changes
+
+- Updated dependencies [9549c9e]
+  - @ai-sdk/provider@3.0.0-beta.29
+  - @ai-sdk/openai-compatible@2.0.0-beta.57
+  - @ai-sdk/provider-utils@4.0.0-beta.56
+
 ## 3.0.0-beta.65
 
 ### Patch Changes
package/dist/index.js
CHANGED
@@ -150,6 +150,27 @@ function convertToXaiChatMessages(prompt) {
   return { messages, warnings };
 }
 
+// src/convert-xai-chat-usage.ts
+function convertXaiChatUsage(usage) {
+  var _a, _b, _c, _d;
+  const cacheReadTokens = (_b = (_a = usage.prompt_tokens_details) == null ? void 0 : _a.cached_tokens) != null ? _b : 0;
+  const reasoningTokens = (_d = (_c = usage.completion_tokens_details) == null ? void 0 : _c.reasoning_tokens) != null ? _d : 0;
+  return {
+    inputTokens: {
+      total: usage.prompt_tokens,
+      noCache: usage.prompt_tokens - cacheReadTokens,
+      cacheRead: cacheReadTokens,
+      cacheWrite: void 0
+    },
+    outputTokens: {
+      total: usage.completion_tokens,
+      text: usage.completion_tokens - reasoningTokens,
+      reasoning: reasoningTokens
+    },
+    raw: usage
+  };
+}
+
 // src/get-response-metadata.ts
 function getResponseMetadata({
   id,
@@ -338,27 +359,6 @@ function prepareTools({
   }
 }
 
-// src/convert-xai-chat-usage.ts
-function convertXaiChatUsage(usage) {
-  var _a, _b, _c, _d;
-  const cacheReadTokens = (_b = (_a = usage.prompt_tokens_details) == null ? void 0 : _a.cached_tokens) != null ? _b : 0;
-  const reasoningTokens = (_d = (_c = usage.completion_tokens_details) == null ? void 0 : _c.reasoning_tokens) != null ? _d : 0;
-  return {
-    inputTokens: {
-      total: usage.prompt_tokens,
-      noCache: usage.prompt_tokens - cacheReadTokens,
-      cacheRead: cacheReadTokens,
-      cacheWrite: void 0
-    },
-    outputTokens: {
-      total: usage.completion_tokens,
-      text: usage.completion_tokens - reasoningTokens,
-      reasoning: reasoningTokens
-    },
-    raw: usage
-  };
-}
-
 // src/xai-chat-language-model.ts
 var XaiChatLanguageModel = class {
   constructor(modelId, config) {
@@ -2004,7 +2004,7 @@ var xaiTools = {
 };
 
 // src/version.ts
-var VERSION = true ? "3.0.0-beta.65" : "0.0.0-test";
+var VERSION = true ? "3.0.0-beta.66" : "0.0.0-test";
 
 // src/xai-provider.ts
 var xaiErrorStructure = {