@ai-sdk/anthropic 3.0.27 → 3.0.29
This diff reflects the changes between publicly released versions of the package as they appear in their respective public registries. It is provided for informational purposes only.
- package/CHANGELOG.md +12 -0
- package/dist/index.js +10 -2
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +10 -2
- package/dist/index.mjs.map +1 -1
- package/dist/internal/index.js +9 -1
- package/dist/internal/index.js.map +1 -1
- package/dist/internal/index.mjs +9 -1
- package/dist/internal/index.mjs.map +1 -1
- package/package.json +3 -3
- package/src/anthropic-messages-api.ts +1 -0
- package/src/anthropic-messages-language-model.ts +11 -0
package/CHANGELOG.md
CHANGED
@@ -1,5 +1,17 @@
 # @ai-sdk/anthropic
 
+## 3.0.29
+
+### Patch Changes
+
+- b9d105f: Fix cache usage reporting for anthropic stream
+
+## 3.0.28
+
+### Patch Changes
+
+- 2445da4: fix(provider/anthropic): populate outputTokens.text field in usage
+
 ## 3.0.27
 
 ### Patch Changes
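The 3.0.29 patch (b9d105f) is about cache token counts from Anthropic streaming responses reaching the usage that the AI SDK reports to callers. A minimal sketch of how that surfaces to a consumer, assuming the AI SDK's streamText API; the model id is a placeholder and the exact names of the cache-related fields on usage/providerMetadata vary between AI SDK versions:

import { streamText } from 'ai';
import { anthropic } from '@ai-sdk/anthropic';

// Sketch only: the per-message cacheControl option follows the documented
// prompt-caching pattern for this provider, but may differ per SDK version.
const result = streamText({
  model: anthropic('claude-sonnet-4-5'), // placeholder model id
  messages: [
    {
      role: 'user',
      content: 'Summarize the attached (long, cacheable) context...',
      providerOptions: {
        anthropic: { cacheControl: { type: 'ephemeral' } },
      },
    },
  ],
});

for await (const chunk of result.textStream) {
  process.stdout.write(chunk);
}

// Before 3.0.29, cache counters read from the stream could be missing here.
console.log(await result.usage);
console.log(await result.providerMetadata);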
package/dist/index.js
CHANGED
@@ -32,7 +32,7 @@ var import_provider4 = require("@ai-sdk/provider");
 var import_provider_utils23 = require("@ai-sdk/provider-utils");
 
 // src/version.ts
-var VERSION = true ? "3.0.27" : "0.0.0-test";
+var VERSION = true ? "3.0.29" : "0.0.0-test";
 
 // src/anthropic-messages-language-model.ts
 var import_provider3 = require("@ai-sdk/provider");
@@ -688,7 +688,8 @@ var anthropicMessagesChunkSchema = (0, import_provider_utils2.lazySchema)(
       usage: import_v42.z.looseObject({
         input_tokens: import_v42.z.number().nullish(),
         output_tokens: import_v42.z.number(),
-        cache_creation_input_tokens: import_v42.z.number().nullish()
+        cache_creation_input_tokens: import_v42.z.number().nullish(),
+        cache_read_input_tokens: import_v42.z.number().nullish()
       })
     }),
     import_v42.z.object({
@@ -3803,6 +3804,13 @@ var AnthropicMessagesLanguageModel = class {
             usage.input_tokens = value.usage.input_tokens;
           }
           usage.output_tokens = value.usage.output_tokens;
+          if (value.usage.cache_read_input_tokens != null) {
+            usage.cache_read_input_tokens = value.usage.cache_read_input_tokens;
+          }
+          if (value.usage.cache_creation_input_tokens != null) {
+            usage.cache_creation_input_tokens = value.usage.cache_creation_input_tokens;
+            cacheCreationInputTokens = value.usage.cache_creation_input_tokens;
+          }
           finishReason = {
             unified: mapAnthropicStopReason({
               finishReason: value.delta.stop_reason,