@ai-sdk/anthropic 3.0.28 → 3.0.29

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/CHANGELOG.md CHANGED
@@ -1,5 +1,11 @@
  # @ai-sdk/anthropic

+ ## 3.0.29
+
+ ### Patch Changes
+
+ - b9d105f: Fix cache usage reporting for anthropic stream
+
  ## 3.0.28

  ### Patch Changes
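For context on the changelog entry above: the fix teaches the streaming code path to report Anthropic's prompt-cache token counts. A minimal sketch of the usage payload shape the provider now reads from streaming events, assuming only the field names visible in the dist/index.js hunks below (the interface name itself is illustrative and not part of the package):

// Illustrative shape only; field names match the schema change below.
// AnthropicStreamUsage is not a type exported by @ai-sdk/anthropic.
interface AnthropicStreamUsage {
  input_tokens?: number | null;
  output_tokens: number;
  cache_creation_input_tokens?: number | null;
  cache_read_input_tokens?: number | null; // newly parsed from stream chunks in 3.0.29
}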
package/dist/index.js CHANGED
@@ -32,7 +32,7 @@ var import_provider4 = require("@ai-sdk/provider");
  var import_provider_utils23 = require("@ai-sdk/provider-utils");

  // src/version.ts
- var VERSION = true ? "3.0.28" : "0.0.0-test";
+ var VERSION = true ? "3.0.29" : "0.0.0-test";

  // src/anthropic-messages-language-model.ts
  var import_provider3 = require("@ai-sdk/provider");
@@ -688,7 +688,8 @@ var anthropicMessagesChunkSchema = (0, import_provider_utils2.lazySchema)(
    usage: import_v42.z.looseObject({
      input_tokens: import_v42.z.number().nullish(),
      output_tokens: import_v42.z.number(),
-     cache_creation_input_tokens: import_v42.z.number().nullish()
+     cache_creation_input_tokens: import_v42.z.number().nullish(),
+     cache_read_input_tokens: import_v42.z.number().nullish()
    })
  }),
  import_v42.z.object({
@@ -1348,7 +1349,7 @@ function convertAnthropicMessagesUsage(usage) {
    },
    outputTokens: {
      total: outputTokens,
-     text: outputTokens,
+     text: void 0,
      reasoning: void 0
    },
    raw: usage
@@ -3803,6 +3804,13 @@ var AnthropicMessagesLanguageModel = class {
        usage.input_tokens = value.usage.input_tokens;
      }
      usage.output_tokens = value.usage.output_tokens;
+     if (value.usage.cache_read_input_tokens != null) {
+       usage.cache_read_input_tokens = value.usage.cache_read_input_tokens;
+     }
+     if (value.usage.cache_creation_input_tokens != null) {
+       usage.cache_creation_input_tokens = value.usage.cache_creation_input_tokens;
+       cacheCreationInputTokens = value.usage.cache_creation_input_tokens;
+     }
      finishReason = {
        unified: mapAnthropicStopReason({
          finishReason: value.delta.stop_reason,
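Taken together, the last hunk means cache token counts arriving on a streamed message_delta event are copied into the accumulated usage instead of being discarded. A rough sketch of that accumulation under the same assumptions as above (a standalone illustration with hypothetical names, not the package's actual stream handler):

// Hypothetical helper mirroring the behavior added in the last hunk above;
// mergeDeltaUsage is not an API of @ai-sdk/anthropic.
type StreamUsage = {
  input_tokens?: number | null;
  output_tokens: number;
  cache_creation_input_tokens?: number | null;
  cache_read_input_tokens?: number | null;
};

function mergeDeltaUsage(acc: StreamUsage, delta: StreamUsage): StreamUsage {
  const merged: StreamUsage = { ...acc, output_tokens: delta.output_tokens };
  if (delta.input_tokens != null) merged.input_tokens = delta.input_tokens;
  // New in 3.0.29: cache fields are carried over instead of being dropped.
  if (delta.cache_read_input_tokens != null) {
    merged.cache_read_input_tokens = delta.cache_read_input_tokens;
  }
  if (delta.cache_creation_input_tokens != null) {
    merged.cache_creation_input_tokens = delta.cache_creation_input_tokens;
  }
  return merged;
}

The convertAnthropicMessagesUsage hunk also stops attributing the entire output total to text tokens (text: outputTokens becomes text: void 0), leaving the text/reasoning split undefined when the API does not report it.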