@ai-sdk/anthropic 3.0.28 → 3.0.29
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +6 -0
- package/dist/index.js +11 -3
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +11 -3
- package/dist/index.mjs.map +1 -1
- package/dist/internal/index.js +10 -2
- package/dist/internal/index.js.map +1 -1
- package/dist/internal/index.mjs +10 -2
- package/dist/internal/index.mjs.map +1 -1
- package/package.json +3 -3
- package/src/anthropic-messages-api.ts +1 -0
- package/src/anthropic-messages-language-model.ts +11 -0
- package/src/convert-anthropic-messages-usage.ts +1 -1
package/CHANGELOG.md
CHANGED
package/dist/index.js
CHANGED
@@ -32,7 +32,7 @@ var import_provider4 = require("@ai-sdk/provider");
 var import_provider_utils23 = require("@ai-sdk/provider-utils");
 
 // src/version.ts
-var VERSION = true ? "3.0.28" : "0.0.0-test";
+var VERSION = true ? "3.0.29" : "0.0.0-test";
 
 // src/anthropic-messages-language-model.ts
 var import_provider3 = require("@ai-sdk/provider");
@@ -688,7 +688,8 @@ var anthropicMessagesChunkSchema = (0, import_provider_utils2.lazySchema)(
       usage: import_v42.z.looseObject({
         input_tokens: import_v42.z.number().nullish(),
         output_tokens: import_v42.z.number(),
-        cache_creation_input_tokens: import_v42.z.number().nullish()
+        cache_creation_input_tokens: import_v42.z.number().nullish(),
+        cache_read_input_tokens: import_v42.z.number().nullish()
       })
     }),
     import_v42.z.object({
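
The hunk above teaches the streamed message_delta usage parser about cache_read_input_tokens. As a standalone sketch of the resulting shape (assuming zod v4, which provides z.looseObject; the names here are illustrative, not package exports):

import { z } from "zod"; // assuming zod v4 (z.looseObject, .nullish)

// Illustrative recreation of the streamed usage fragment after 3.0.29.
// looseObject lets unrecognized usage fields from the API pass through.
const streamedUsage = z.looseObject({
  input_tokens: z.number().nullish(),
  output_tokens: z.number(),
  cache_creation_input_tokens: z.number().nullish(),
  cache_read_input_tokens: z.number().nullish(), // newly parsed field
});

type StreamedUsage = z.infer<typeof streamedUsage>;
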
@@ -1348,7 +1349,7 @@ function convertAnthropicMessagesUsage(usage) {
     },
     outputTokens: {
       total: outputTokens,
-      text:
+      text: void 0,
       reasoning: void 0
     },
     raw: usage
@@ -3803,6 +3804,13 @@ var AnthropicMessagesLanguageModel = class {
           usage.input_tokens = value.usage.input_tokens;
         }
         usage.output_tokens = value.usage.output_tokens;
+        if (value.usage.cache_read_input_tokens != null) {
+          usage.cache_read_input_tokens = value.usage.cache_read_input_tokens;
+        }
+        if (value.usage.cache_creation_input_tokens != null) {
+          usage.cache_creation_input_tokens = value.usage.cache_creation_input_tokens;
+          cacheCreationInputTokens = value.usage.cache_creation_input_tokens;
+        }
         finishReason = {
           unified: mapAnthropicStopReason({
             finishReason: value.delta.stop_reason,