@ai-sdk/anthropic 2.1.0-beta.9 → 3.0.0-beta.14
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +42 -0
- package/dist/index.js +19 -8
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +19 -8
- package/dist/index.mjs.map +1 -1
- package/dist/internal/index.js +18 -7
- package/dist/internal/index.js.map +1 -1
- package/dist/internal/index.mjs +18 -7
- package/dist/internal/index.mjs.map +1 -1
- package/package.json +4 -4
package/dist/internal/index.js
CHANGED

@@ -460,7 +460,7 @@ async function convertToAnthropicMessagesPrompt({
   sendReasoning,
   warnings
 }) {
-  var _a, _b, _c, _d, _e;
+  var _a, _b, _c, _d, _e, _f;
   const betas = /* @__PURE__ */ new Set();
   const blocks = groupIntoBlocks(prompt);
   let system = void 0;
@@ -636,6 +636,9 @@ async function convertToAnthropicMessagesPrompt({
             case "error-text":
               contentValue = output.value;
               break;
+            case "execution-denied":
+              contentValue = (_e = output.reason) != null ? _e : "Tool execution denied.";
+              break;
             case "json":
             case "error-json":
             default:
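The new "execution-denied" branch converts a denied tool execution into the text content of an Anthropic tool_result block, falling back to "Tool execution denied." when no reason is present. A minimal sketch of that mapping, using a locally defined object rather than the SDK's own prompt types; only output.type and output.reason come from the diff above, the toolCallId/toolName values are illustrative:

// Hypothetical tool-result part as it might reach the converter.
const deniedToolResult = {
  type: "tool-result",
  toolCallId: "call_123",   // illustrative
  toolName: "delete_files", // illustrative
  output: { type: "execution-denied", reason: undefined as string | undefined },
};

// The converter's fallback behaviour for the tool_result content:
const contentValue = deniedToolResult.output.reason ?? "Tool execution denied.";
console.log(contentValue); // "Tool execution denied."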
@@ -670,7 +673,7 @@ async function convertToAnthropicMessagesPrompt({
         for (let k = 0; k < content.length; k++) {
           const part = content[k];
           const isLastContentPart = k === content.length - 1;
-          const cacheControl = (
+          const cacheControl = (_f = getCacheControl(part.providerOptions)) != null ? _f : isLastContentPart ? getCacheControl(message.providerOptions) : void 0;
           switch (part.type) {
             case "text": {
               anthropicContent.push({
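With the replaced line, cacheControl is now resolved from each content part's providerOptions first, and only the last part falls back to message-level providerOptions. A sketch of what that enables from the caller side, assuming the provider's anthropic.cacheControl option key and the usual generateText/anthropic entry points; the model id and prompt text are placeholders:

import { generateText } from "ai";
import { anthropic } from "@ai-sdk/anthropic";

const result = await generateText({
  model: anthropic("claude-sonnet-4-5"), // placeholder model id
  messages: [
    {
      role: "user",
      content: [
        {
          type: "text",
          text: "<very long reference document>",
          // Part-level cache control: only this block gets a cache breakpoint.
          providerOptions: { anthropic: { cacheControl: { type: "ephemeral" } } },
        },
        { type: "text", text: "Summarize the document above." },
      ],
    },
  ],
});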
@@ -1207,7 +1210,7 @@ var AnthropicMessagesLanguageModel = class {
     });
   }
   async doGenerate(options) {
-    var _a, _b, _c, _d, _e;
+    var _a, _b, _c, _d, _e, _f;
     const { args, warnings, betas, usesJsonResponseTool } = await this.getArgs(options);
     const citationDocuments = this.extractCitationDocuments(options.prompt);
     const {
@@ -1434,7 +1437,8 @@ var AnthropicMessagesLanguageModel = class {
       providerMetadata: {
         anthropic: {
           usage: response.usage,
-          cacheCreationInputTokens: (_e = response.usage.cache_creation_input_tokens) != null ? _e : null
+          cacheCreationInputTokens: (_e = response.usage.cache_creation_input_tokens) != null ? _e : null,
+          stopSequence: (_f = response.stop_sequence) != null ? _f : null
         }
       }
     };
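doGenerate now surfaces the matched stop sequence (response.stop_sequence) as providerMetadata.anthropic.stopSequence next to cacheCreationInputTokens. A sketch of reading it, assuming the ai package's generateText result exposes providerMetadata and the standard stopSequences option; the model id is a placeholder:

import { generateText } from "ai";
import { anthropic } from "@ai-sdk/anthropic";

const { text, providerMetadata } = await generateText({
  model: anthropic("claude-sonnet-4-5"), // placeholder model id
  prompt: "List three fruits, then write END.",
  stopSequences: ["END"],
});

// null when generation stopped for another reason (e.g. end_turn or max tokens).
console.log(providerMetadata?.anthropic?.stopSequence); // e.g. "END"
console.log(text);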
@@ -1463,6 +1467,7 @@ var AnthropicMessagesLanguageModel = class {
     const contentBlocks = {};
     let rawUsage = void 0;
     let cacheCreationInputTokens = null;
+    let stopSequence = null;
     let blockType = void 0;
     const generateId2 = this.generateId;
     return {
@@ -1472,7 +1477,7 @@ var AnthropicMessagesLanguageModel = class {
         controller.enqueue({ type: "stream-start", warnings });
       },
       transform(chunk, controller) {
-        var _a, _b, _c, _d, _e, _f, _g;
+        var _a, _b, _c, _d, _e, _f, _g, _h;
         if (options.includeRawChunks) {
           controller.enqueue({ type: "raw", rawValue: chunk.rawValue });
         }
@@ -1810,6 +1815,7 @@ var AnthropicMessagesLanguageModel = class {
               finishReason: value.delta.stop_reason,
               isJsonResponseFromTool: usesJsonResponseTool
             });
+            stopSequence = (_h = value.delta.stop_sequence) != null ? _h : null;
             rawUsage = {
               ...rawUsage,
               ...value.usage
@@ -1824,7 +1830,8 @@ var AnthropicMessagesLanguageModel = class {
             providerMetadata: {
               anthropic: {
                 usage: rawUsage != null ? rawUsage : null,
-                cacheCreationInputTokens
+                cacheCreationInputTokens,
+                stopSequence
               }
             }
           });
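The streaming path mirrors this: the stopSequence captured from message_delta is attached to the finish event's providerMetadata.anthropic entry. A sketch of observing it with streamText, assuming the streaming result exposes a providerMetadata promise that settles once the stream has been drained (the exact surface may differ between ai versions); the model id is a placeholder:

import { streamText } from "ai";
import { anthropic } from "@ai-sdk/anthropic";

const result = streamText({
  model: anthropic("claude-sonnet-4-5"), // placeholder model id
  prompt: "Count to ten, then write DONE.",
  stopSequences: ["DONE"],
});

// Drain the stream; the metadata promise settles after the stream finishes.
let text = "";
for await (const chunk of result.textStream) {
  text += chunk;
}

const metadata = await result.providerMetadata;
console.log(metadata?.anthropic?.stopSequence); // e.g. "DONE" or null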
@@ -1942,6 +1949,7 @@ var anthropicMessagesResponseSchema = import_v47.z.object({
     ])
   ),
   stop_reason: import_v47.z.string().nullish(),
+  stop_sequence: import_v47.z.string().nullish(),
   usage: import_v47.z.looseObject({
     input_tokens: import_v47.z.number(),
     output_tokens: import_v47.z.number(),
@@ -2090,7 +2098,10 @@ var anthropicMessagesChunkSchema = import_v47.z.discriminatedUnion("type", [
   }),
   import_v47.z.object({
     type: import_v47.z.literal("message_delta"),
-    delta: import_v47.z.object({
+    delta: import_v47.z.object({
+      stop_reason: import_v47.z.string().nullish(),
+      stop_sequence: import_v47.z.string().nullish()
+    }),
     usage: import_v47.z.looseObject({
       output_tokens: import_v47.z.number(),
       cache_creation_input_tokens: import_v47.z.number().nullish()
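For reference, the message_delta chunk variant now parses stop_reason and stop_sequence from the delta object. An equivalent standalone shape, assuming a plain zod import instead of the bundled import_v47 namespace and z.object in place of looseObject; the usage fields are trimmed to those shown in the diff:

import { z } from "zod";

// Approximation of the message_delta chunk parsed from the Anthropic stream.
const messageDeltaChunk = z.object({
  type: z.literal("message_delta"),
  delta: z.object({
    stop_reason: z.string().nullish(),
    stop_sequence: z.string().nullish(),
  }),
  usage: z.object({
    output_tokens: z.number(),
    cache_creation_input_tokens: z.number().nullish(),
  }),
});

// Example event emitted when generation hits a stop sequence.
const parsed = messageDeltaChunk.parse({
  type: "message_delta",
  delta: { stop_reason: "stop_sequence", stop_sequence: "END" },
  usage: { output_tokens: 42 },
});
console.log(parsed.delta.stop_sequence); // "END"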