@ai-sdk/anthropic 3.0.28 → 3.0.30
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +12 -0
- package/README.md +8 -0
- package/dist/index.js +11 -3
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +11 -3
- package/dist/index.mjs.map +1 -1
- package/dist/internal/index.js +10 -2
- package/dist/internal/index.js.map +1 -1
- package/dist/internal/index.mjs +10 -2
- package/dist/internal/index.mjs.map +1 -1
- package/package.json +1 -1
- package/src/anthropic-messages-api.ts +1 -0
- package/src/anthropic-messages-language-model.ts +11 -0
- package/src/convert-anthropic-messages-usage.ts +1 -1
package/CHANGELOG.md
CHANGED
@@ -1,5 +1,17 @@
 # @ai-sdk/anthropic

+## 3.0.30
+
+### Patch Changes
+
+- 1524271: chore: add skill information to README files
+
+## 3.0.29
+
+### Patch Changes
+
+- b9d105f: Fix cache usage reporting for anthropic stream
+
 ## 3.0.28

 ### Patch Changes
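The 3.0.29 entry is the substantive change in this diff: the streaming handler now carries Anthropic's cache token counters through to the reported usage (see the `dist/index.js` hunks below). As a rough illustration only, not the package's actual implementation, the accumulation amounts to something like the following sketch, where the payload type and helper name are invented for the example:

```ts
// Hypothetical shapes for illustration; the real package validates these
// payloads with zod schemas (see the dist/index.js diff below).
interface AnthropicStreamUsage {
  input_tokens?: number | null;
  output_tokens: number;
  cache_creation_input_tokens?: number | null;
  cache_read_input_tokens?: number | null;
}

// Merge the usage payload of a streamed `message_delta` event into the
// running totals. Before 3.0.29, the two cache_* fields were not copied
// over, so cache usage was under-reported for streamed responses.
function mergeStreamUsage(
  acc: AnthropicStreamUsage,
  delta: AnthropicStreamUsage,
): AnthropicStreamUsage {
  const merged: AnthropicStreamUsage = { ...acc, output_tokens: delta.output_tokens };
  if (delta.input_tokens != null) {
    merged.input_tokens = delta.input_tokens;
  }
  if (delta.cache_read_input_tokens != null) {
    merged.cache_read_input_tokens = delta.cache_read_input_tokens;
  }
  if (delta.cache_creation_input_tokens != null) {
    merged.cache_creation_input_tokens = delta.cache_creation_input_tokens;
  }
  return merged;
}
```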
package/README.md
CHANGED
@@ -10,6 +10,14 @@ The Anthropic provider is available in the `@ai-sdk/anthropic` module. You can i
 npm i @ai-sdk/anthropic
 ```

+## Skill for Coding Agents
+
+If you use coding agents such as Claude Code or Cursor, we highly recommend adding the AI SDK skill to your repository:
+
+```shell
+npx skills add vercel/ai
+```
+
 ## Provider Instance

 You can import the default provider instance `anthropic` from `@ai-sdk/anthropic`:
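For orientation, the `## Provider Instance` section that this hunk borders covers basic usage of the default `anthropic` instance. A minimal sketch, assuming the `ai` package is installed, `ANTHROPIC_API_KEY` is set, and an example model ID (substitute your own):

```ts
import { generateText } from 'ai';
import { anthropic } from '@ai-sdk/anthropic';

// Example model ID; replace with whichever Claude model you have access to.
const { text, usage } = await generateText({
  model: anthropic('claude-3-5-haiku-latest'),
  prompt: 'In one sentence, what does prompt caching do?',
});

console.log(text);
console.log(usage); // token usage reported by the provider
```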
package/dist/index.js
CHANGED
@@ -32,7 +32,7 @@ var import_provider4 = require("@ai-sdk/provider");
 var import_provider_utils23 = require("@ai-sdk/provider-utils");

 // src/version.ts
-var VERSION = true ? "3.0.28" : "0.0.0-test";
+var VERSION = true ? "3.0.30" : "0.0.0-test";

 // src/anthropic-messages-language-model.ts
 var import_provider3 = require("@ai-sdk/provider");
@@ -688,7 +688,8 @@ var anthropicMessagesChunkSchema = (0, import_provider_utils2.lazySchema)(
       usage: import_v42.z.looseObject({
         input_tokens: import_v42.z.number().nullish(),
         output_tokens: import_v42.z.number(),
-        cache_creation_input_tokens: import_v42.z.number().nullish()
+        cache_creation_input_tokens: import_v42.z.number().nullish(),
+        cache_read_input_tokens: import_v42.z.number().nullish()
       })
     }),
     import_v42.z.object({
@@ -1348,7 +1349,7 @@ function convertAnthropicMessagesUsage(usage) {
     },
     outputTokens: {
       total: outputTokens,
-      text:
+      text: void 0,
       reasoning: void 0
     },
     raw: usage
@@ -3803,6 +3804,13 @@ var AnthropicMessagesLanguageModel = class {
             usage.input_tokens = value.usage.input_tokens;
           }
           usage.output_tokens = value.usage.output_tokens;
+          if (value.usage.cache_read_input_tokens != null) {
+            usage.cache_read_input_tokens = value.usage.cache_read_input_tokens;
+          }
+          if (value.usage.cache_creation_input_tokens != null) {
+            usage.cache_creation_input_tokens = value.usage.cache_creation_input_tokens;
+            cacheCreationInputTokens = value.usage.cache_creation_input_tokens;
+          }
           finishReason = {
             unified: mapAnthropicStopReason({
               finishReason: value.delta.stop_reason,