@ai-sdk/anthropic 2.1.0-beta.9 → 3.0.0-beta.14
This diff shows the changes between two publicly released versions of the package, as they appear in their public registries, and is provided for informational purposes only.
- package/CHANGELOG.md +42 -0
- package/dist/index.js +19 -8
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +19 -8
- package/dist/index.mjs.map +1 -1
- package/dist/internal/index.js +18 -7
- package/dist/internal/index.js.map +1 -1
- package/dist/internal/index.mjs +18 -7
- package/dist/internal/index.mjs.map +1 -1
- package/package.json +4 -4
package/CHANGELOG.md
CHANGED
@@ -1,5 +1,47 @@
 # @ai-sdk/anthropic
 
+## 3.0.0-beta.14
+
+### Major Changes
+
+- dee8b05: ai SDK 6 beta
+
+### Patch Changes
+
+- Updated dependencies [dee8b05]
+  - @ai-sdk/provider@3.0.0-beta.6
+  - @ai-sdk/provider-utils@4.0.0-beta.10
+
+## 2.1.0-beta.13
+
+### Patch Changes
+
+- Updated dependencies [521c537]
+  - @ai-sdk/provider-utils@3.1.0-beta.9
+
+## 2.1.0-beta.12
+
+### Patch Changes
+
+- Updated dependencies [e06565c]
+  - @ai-sdk/provider-utils@3.1.0-beta.8
+
+## 2.1.0-beta.11
+
+### Patch Changes
+
+- e8109d3: feat: tool execution approval
+- Updated dependencies [046aa3b]
+- Updated dependencies [e8109d3]
+  - @ai-sdk/provider@2.1.0-beta.5
+  - @ai-sdk/provider-utils@3.1.0-beta.7
+
+## 2.1.0-beta.10
+
+### Patch Changes
+
+- dedf206: feat(provider/anthropic): expose stop_sequence in provider metadata
+
 ## 2.1.0-beta.9
 
 ### Patch Changes
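The dedf206 change (2.1.0-beta.10) exposes the Anthropic API's `stop_sequence` through the provider metadata returned by the language model; the implementation is visible in the `dist/index.js` hunks below. A minimal consumer-side sketch, assuming the usual `ai` + `@ai-sdk/anthropic` setup; the model id and prompt are placeholders, and only the `providerMetadata.anthropic.stopSequence` shape comes from this diff:

```ts
import { generateText } from 'ai';
import { anthropic } from '@ai-sdk/anthropic';

const result = await generateText({
  model: anthropic('claude-sonnet-4-5'), // placeholder model id
  prompt: 'Answer with DONE when you are finished.',
  stopSequences: ['DONE'],
});

// Added in 2.1.0-beta.10: the stop sequence that ended generation, or null
// if the response stopped for another reason (e.g. end_turn, max_tokens).
const stopSequence = result.providerMetadata?.anthropic?.stopSequence ?? null;
console.log(stopSequence);
```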
package/dist/index.js
CHANGED
@@ -41,7 +41,7 @@ var import_provider4 = require("@ai-sdk/provider");
 var import_provider_utils15 = require("@ai-sdk/provider-utils");
 
 // src/version.ts
-var VERSION = true ? "2.1.0-beta.9" : "0.0.0-test";
+var VERSION = true ? "3.0.0-beta.14" : "0.0.0-test";
 
 // src/anthropic-messages-language-model.ts
 var import_provider3 = require("@ai-sdk/provider");
@@ -467,7 +467,7 @@ async function convertToAnthropicMessagesPrompt({
   sendReasoning,
   warnings
 }) {
-  var _a, _b, _c, _d, _e;
+  var _a, _b, _c, _d, _e, _f;
   const betas = /* @__PURE__ */ new Set();
   const blocks = groupIntoBlocks(prompt);
   let system = void 0;
@@ -643,6 +643,9 @@ async function convertToAnthropicMessagesPrompt({
             case "error-text":
               contentValue = output.value;
               break;
+            case "execution-denied":
+              contentValue = (_e = output.reason) != null ? _e : "Tool execution denied.";
+              break;
             case "json":
             case "error-json":
             default:
@@ -677,7 +680,7 @@ async function convertToAnthropicMessagesPrompt({
         for (let k = 0; k < content.length; k++) {
           const part = content[k];
           const isLastContentPart = k === content.length - 1;
-          const cacheControl = (
+          const cacheControl = (_f = getCacheControl(part.providerOptions)) != null ? _f : isLastContentPart ? getCacheControl(message.providerOptions) : void 0;
           switch (part.type) {
             case "text": {
               anthropicContent.push({
@@ -1214,7 +1217,7 @@ var AnthropicMessagesLanguageModel = class {
     });
   }
   async doGenerate(options) {
-    var _a, _b, _c, _d, _e;
+    var _a, _b, _c, _d, _e, _f;
     const { args, warnings, betas, usesJsonResponseTool } = await this.getArgs(options);
     const citationDocuments = this.extractCitationDocuments(options.prompt);
     const {
@@ -1441,7 +1444,8 @@ var AnthropicMessagesLanguageModel = class {
       providerMetadata: {
         anthropic: {
           usage: response.usage,
-          cacheCreationInputTokens: (_e = response.usage.cache_creation_input_tokens) != null ? _e : null
+          cacheCreationInputTokens: (_e = response.usage.cache_creation_input_tokens) != null ? _e : null,
+          stopSequence: (_f = response.stop_sequence) != null ? _f : null
         }
       }
     };
@@ -1470,6 +1474,7 @@ var AnthropicMessagesLanguageModel = class {
     const contentBlocks = {};
     let rawUsage = void 0;
     let cacheCreationInputTokens = null;
+    let stopSequence = null;
    let blockType = void 0;
    const generateId3 = this.generateId;
    return {
@@ -1479,7 +1484,7 @@ var AnthropicMessagesLanguageModel = class {
         controller.enqueue({ type: "stream-start", warnings });
       },
       transform(chunk, controller) {
-        var _a, _b, _c, _d, _e, _f, _g;
+        var _a, _b, _c, _d, _e, _f, _g, _h;
         if (options.includeRawChunks) {
           controller.enqueue({ type: "raw", rawValue: chunk.rawValue });
         }
@@ -1817,6 +1822,7 @@ var AnthropicMessagesLanguageModel = class {
               finishReason: value.delta.stop_reason,
               isJsonResponseFromTool: usesJsonResponseTool
             });
+            stopSequence = (_h = value.delta.stop_sequence) != null ? _h : null;
             rawUsage = {
               ...rawUsage,
               ...value.usage
@@ -1831,7 +1837,8 @@ var AnthropicMessagesLanguageModel = class {
             providerMetadata: {
               anthropic: {
                 usage: rawUsage != null ? rawUsage : null,
-                cacheCreationInputTokens
+                cacheCreationInputTokens,
+                stopSequence
               }
             }
           });
@@ -1949,6 +1956,7 @@ var anthropicMessagesResponseSchema = import_v47.z.object({
     ])
   ),
   stop_reason: import_v47.z.string().nullish(),
+  stop_sequence: import_v47.z.string().nullish(),
   usage: import_v47.z.looseObject({
     input_tokens: import_v47.z.number(),
     output_tokens: import_v47.z.number(),
@@ -2097,7 +2105,10 @@ var anthropicMessagesChunkSchema = import_v47.z.discriminatedUnion("type", [
   }),
   import_v47.z.object({
     type: import_v47.z.literal("message_delta"),
-    delta: import_v47.z.object({
+    delta: import_v47.z.object({
+      stop_reason: import_v47.z.string().nullish(),
+      stop_sequence: import_v47.z.string().nullish()
+    }),
     usage: import_v47.z.looseObject({
       output_tokens: import_v47.z.number(),
       cache_creation_input_tokens: import_v47.z.number().nullish()