ai 3.3.26 → 3.3.28
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +22 -0
- package/dist/index.d.mts +237 -57
- package/dist/index.d.ts +237 -57
- package/dist/index.js +246 -41
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +239 -37
- package/dist/index.mjs.map +1 -1
- package/package.json +8 -8
- package/rsc/dist/index.d.ts +8 -2
- package/rsc/dist/rsc-server.d.mts +8 -2
- package/rsc/dist/rsc-server.mjs +10 -4
- package/rsc/dist/rsc-server.mjs.map +1 -1
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "ai",
-  "version": "3.3.26",
+  "version": "3.3.28",
   "description": "Vercel AI SDK - The AI Toolkit for TypeScript and JavaScript",
   "license": "Apache-2.0",
   "sideEffects": false,
@@ -59,13 +59,13 @@
     }
   },
   "dependencies": {
-    "@ai-sdk/provider": "0.0.
-    "@ai-sdk/provider-utils": "1.0.
-    "@ai-sdk/react": "0.0.
-    "@ai-sdk/solid": "0.0.
-    "@ai-sdk/svelte": "0.0.
-    "@ai-sdk/ui-utils": "0.0.
-    "@ai-sdk/vue": "0.0.
+    "@ai-sdk/provider": "0.0.23",
+    "@ai-sdk/provider-utils": "1.0.18",
+    "@ai-sdk/react": "0.0.55",
+    "@ai-sdk/solid": "0.0.44",
+    "@ai-sdk/svelte": "0.0.46",
+    "@ai-sdk/ui-utils": "0.0.41",
+    "@ai-sdk/vue": "0.0.46",
     "@opentelemetry/api": "1.9.0",
     "eventsource-parser": "1.1.2",
     "jsondiffpatch": "0.6.0",
package/rsc/dist/index.d.ts
CHANGED
@@ -181,7 +181,7 @@ type CallSettings = {
 /**
 Represents the number of tokens used in a prompt and completion.
  */
-type 
+type LanguageModelUsage = {
 /**
 The number of tokens used in the prompt.
  */
@@ -294,6 +294,12 @@ interface ToolCallPart {
 Arguments of the tool call. This is a JSON-serializable object that matches the tool's input schema.
  */
 args: unknown;
+/**
+Additional provider-specific metadata. They are passed through
+to the provider from the AI SDK and enable provider-specific
+functionality that can be fully encapsulated in the provider.
+ */
+experimental_providerMetadata?: ProviderMetadata;
 }
 /**
 Tool result content part of a prompt. It contains the result of the tool call with the matching ID.
@@ -482,7 +488,7 @@ declare function streamUI<TOOLS extends {
 /**
  * The token usage of the generated response.
  */
-usage: 
+usage: LanguageModelUsage;
 /**
  * The final ui node that was generated.
  */
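The new optional `experimental_providerMetadata` field on `ToolCallPart` lets a caller attach provider-specific data to an individual tool-call part. A minimal sketch of what such a part could look like, assuming `ToolCallPart` is exported from the package's core entry point; the `someProvider` namespace and its settings are hypothetical placeholders, not part of the SDK:

```ts
import type { ToolCallPart } from 'ai';

// A tool-call part carrying hypothetical provider-specific metadata.
// The shape under each provider key is defined by that provider, not by the SDK.
const part: ToolCallPart = {
  type: 'tool-call',
  toolCallId: 'call-123',
  toolName: 'getWeather',
  args: { city: 'Berlin' },
  experimental_providerMetadata: {
    // hypothetical provider namespace and setting:
    someProvider: { cacheControl: { type: 'ephemeral' } },
  },
};
```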
package/rsc/dist/rsc-server.d.mts
CHANGED

@@ -179,7 +179,7 @@ type CallSettings = {
 /**
 Represents the number of tokens used in a prompt and completion.
  */
-type 
+type LanguageModelUsage = {
 /**
 The number of tokens used in the prompt.
  */
@@ -292,6 +292,12 @@ interface ToolCallPart {
 Arguments of the tool call. This is a JSON-serializable object that matches the tool's input schema.
  */
 args: unknown;
+/**
+Additional provider-specific metadata. They are passed through
+to the provider from the AI SDK and enable provider-specific
+functionality that can be fully encapsulated in the provider.
+ */
+experimental_providerMetadata?: ProviderMetadata;
 }
 /**
 Tool result content part of a prompt. It contains the result of the tool call with the matching ID.
@@ -480,7 +486,7 @@ declare function streamUI<TOOLS extends {
 /**
  * The token usage of the generated response.
  */
-usage: 
+usage: LanguageModelUsage;
 /**
  * The final ui node that was generated.
  */
package/rsc/dist/rsc-server.mjs
CHANGED
@@ -590,7 +590,13 @@ function convertToLanguageModelMessage(message, downloadedImages) {
     content: message.content.filter(
       // remove empty text parts:
       (part) => part.type !== "text" || part.text !== ""
-    ),
+    ).map((part) => {
+      const { experimental_providerMetadata, ...rest } = part;
+      return {
+        ...rest,
+        providerMetadata: experimental_providerMetadata
+      };
+    }),
     providerMetadata: message.experimental_providerMetadata
   };
 }
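The added `.map()` strips the user-facing `experimental_providerMetadata` key from each content part and re-attaches its value under the `providerMetadata` key expected by the language-model message format. A standalone sketch of that destructure-and-rename pattern (the `toProviderPart` helper name is made up for illustration):

```ts
// Rename one property while passing everything else through unchanged.
// `experimental_providerMetadata` is what callers write; `providerMetadata`
// is what the provider-facing message format expects.
function toProviderPart(part: Record<string, unknown>) {
  const { experimental_providerMetadata, ...rest } = part;
  return { ...rest, providerMetadata: experimental_providerMetadata };
}

// Example: the metadata ends up under the renamed key.
const converted = toProviderPart({
  type: 'text',
  text: 'Hello',
  experimental_providerMetadata: { someProvider: { priority: 'high' } },
});
// converted = { type: 'text', text: 'Hello', providerMetadata: { someProvider: { ... } } }
```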
@@ -953,8 +959,8 @@ function validatePrompt(prompt) {
   throw new Error("unreachable");
 }
 
-// core/types/
-function 
+// core/types/usage.ts
+function calculateLanguageModelUsage(usage) {
   return {
     promptTokens: usage.promptTokens,
     completionTokens: usage.completionTokens,
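Only the `promptTokens` and `completionTokens` mappings of `calculateLanguageModelUsage` are visible in this hunk. A rough sketch of the shape it appears to produce; the derived `totalTokens` field is an assumption and is not shown in the diff:

```ts
interface LanguageModelUsage {
  promptTokens: number;
  completionTokens: number;
  totalTokens: number; // assumption: a derived sum, not visible in this hunk
}

// Sketch of the mapping performed by calculateLanguageModelUsage.
function calculateLanguageModelUsage(usage: {
  promptTokens: number;
  completionTokens: number;
}): LanguageModelUsage {
  return {
    promptTokens: usage.promptTokens,
    completionTokens: usage.completionTokens,
    totalTokens: usage.promptTokens + usage.completionTokens, // assumption
  };
}
```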
@@ -1434,7 +1440,7 @@ async function streamUI({
       case "finish": {
         onFinish == null ? void 0 : onFinish({
           finishReason: value.finishReason,
-          usage: 
+          usage: calculateLanguageModelUsage(value.usage),
           value: ui.value,
           warnings: result.warnings,
           rawResponse: result.rawResponse
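With this change, `streamUI`'s `onFinish` callback receives the normalized `LanguageModelUsage` object rather than the raw provider counts. A hedged usage sketch; the `@ai-sdk/openai` provider, the model id, and the `demo` function are placeholders and not part of this diff:

```ts
import { streamUI } from 'ai/rsc';
import { openai } from '@ai-sdk/openai'; // assumed provider package, not part of this diff

export async function demo() {
  const result = await streamUI({
    model: openai('gpt-4o-mini'), // hypothetical model id
    prompt: 'Say hello',
    text: ({ content }) => content, // render the plain text as the UI node
    onFinish: ({ usage, finishReason }) => {
      // `usage` is the normalized object built by calculateLanguageModelUsage
      console.log('prompt tokens:', usage.promptTokens);
      console.log('completion tokens:', usage.completionTokens);
      console.log('finish reason:', finishReason);
    },
  });
  return result.value; // the final UI node
}
```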