@ai-sdk/anthropic 2.1.0-beta.8 → 3.0.0-beta.14
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +52 -0
- package/dist/index.js +19 -8
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +19 -8
- package/dist/index.mjs.map +1 -1
- package/dist/internal/index.js +18 -7
- package/dist/internal/index.js.map +1 -1
- package/dist/internal/index.mjs +18 -7
- package/dist/internal/index.mjs.map +1 -1
- package/package.json +4 -4
package/dist/internal/index.mjs
CHANGED
@@ -436,7 +436,7 @@ async function convertToAnthropicMessagesPrompt({
   sendReasoning,
   warnings
 }) {
-  var _a, _b, _c, _d, _e;
+  var _a, _b, _c, _d, _e, _f;
   const betas = /* @__PURE__ */ new Set();
   const blocks = groupIntoBlocks(prompt);
   let system = void 0;
@@ -612,6 +612,9 @@ async function convertToAnthropicMessagesPrompt({
           case "error-text":
             contentValue = output.value;
             break;
+          case "execution-denied":
+            contentValue = (_e = output.reason) != null ? _e : "Tool execution denied.";
+            break;
           case "json":
           case "error-json":
           default:
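
The new "execution-denied" branch converts a denied tool call into a plain-text tool result, preferring a caller-supplied reason over a default message. A minimal TypeScript sketch of the shape this branch handles (the object literal, ids, and tool name are illustrative, not types exported by the package):

// Hypothetical tool-result part with a denied execution; shape inferred from the
// diff above, not an official type definition.
const deniedToolResult = {
  type: "tool-result" as const,
  toolCallId: "call_123",
  toolName: "delete_files",
  output: {
    type: "execution-denied" as const,
    reason: "User rejected the call in the approval step.",
  },
};

// Mirrors the new case: use the provided reason, otherwise fall back to the
// default text from the diff.
const contentValue = deniedToolResult.output.reason ?? "Tool execution denied.";
console.log(contentValue);
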
@@ -646,7 +649,7 @@ async function convertToAnthropicMessagesPrompt({
       for (let k = 0; k < content.length; k++) {
         const part = content[k];
         const isLastContentPart = k === content.length - 1;
-        const cacheControl = (
+        const cacheControl = (_f = getCacheControl(part.providerOptions)) != null ? _f : isLastContentPart ? getCacheControl(message.providerOptions) : void 0;
         switch (part.type) {
           case "text": {
             anthropicContent.push({
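
The rewritten cacheControl lookup reads the cache-control provider option from the individual content part first and falls back to the message-level option only for the last part. A hedged sketch of setting that part-level option from the AI SDK side (the cacheControl key under providerOptions.anthropic is assumed from getCacheControl above; the model id and document text are placeholders):

import { generateText } from "ai";
import { anthropic } from "@ai-sdk/anthropic";

// Sketch only: marks one content part as an ephemeral cache breakpoint via
// part-level providerOptions, which the updated lookup now honors directly.
const referenceDocument = "...large, reusable prompt prefix...";

const result = await generateText({
  model: anthropic("claude-sonnet-4-20250514"), // placeholder model id
  messages: [
    {
      role: "user",
      content: [
        {
          type: "text",
          text: referenceDocument,
          providerOptions: {
            anthropic: { cacheControl: { type: "ephemeral" } },
          },
        },
        { type: "text", text: "Summarize the document above." },
      ],
    },
  ],
});

console.log(result.text);
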
@@ -1183,7 +1186,7 @@ var AnthropicMessagesLanguageModel = class {
     });
   }
   async doGenerate(options) {
-    var _a, _b, _c, _d, _e;
+    var _a, _b, _c, _d, _e, _f;
    const { args, warnings, betas, usesJsonResponseTool } = await this.getArgs(options);
    const citationDocuments = this.extractCitationDocuments(options.prompt);
    const {
@@ -1410,7 +1413,8 @@ var AnthropicMessagesLanguageModel = class {
       providerMetadata: {
         anthropic: {
           usage: response.usage,
-          cacheCreationInputTokens: (_e = response.usage.cache_creation_input_tokens) != null ? _e : null
+          cacheCreationInputTokens: (_e = response.usage.cache_creation_input_tokens) != null ? _e : null,
+          stopSequence: (_f = response.stop_sequence) != null ? _f : null
         }
       }
     };
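
doGenerate now surfaces response.stop_sequence as providerMetadata.anthropic.stopSequence (the streaming path below does the same from message_delta chunks). A hedged sketch of reading it after a generateText call (field name taken from the diff; the model id is a placeholder and availability depends on this beta):

import { generateText } from "ai";
import { anthropic } from "@ai-sdk/anthropic";

// Sketch only: inspects which stop sequence (if any) ended the generation.
const { text, providerMetadata } = await generateText({
  model: anthropic("claude-sonnet-4-20250514"), // placeholder model id
  prompt: "List three colors, then print DONE.",
  stopSequences: ["DONE"],
});

console.log(text);
// Expected to be "DONE" when the stop sequence triggered, otherwise null.
console.log(providerMetadata?.anthropic?.stopSequence);
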
@@ -1439,6 +1443,7 @@ var AnthropicMessagesLanguageModel = class {
     const contentBlocks = {};
     let rawUsage = void 0;
     let cacheCreationInputTokens = null;
+    let stopSequence = null;
     let blockType = void 0;
     const generateId2 = this.generateId;
     return {
@@ -1448,7 +1453,7 @@ var AnthropicMessagesLanguageModel = class {
         controller.enqueue({ type: "stream-start", warnings });
       },
       transform(chunk, controller) {
-        var _a, _b, _c, _d, _e, _f, _g;
+        var _a, _b, _c, _d, _e, _f, _g, _h;
        if (options.includeRawChunks) {
          controller.enqueue({ type: "raw", rawValue: chunk.rawValue });
        }
@@ -1786,6 +1791,7 @@ var AnthropicMessagesLanguageModel = class {
               finishReason: value.delta.stop_reason,
               isJsonResponseFromTool: usesJsonResponseTool
             });
+            stopSequence = (_h = value.delta.stop_sequence) != null ? _h : null;
             rawUsage = {
               ...rawUsage,
               ...value.usage
@@ -1800,7 +1806,8 @@ var AnthropicMessagesLanguageModel = class {
             providerMetadata: {
               anthropic: {
                 usage: rawUsage != null ? rawUsage : null,
-                cacheCreationInputTokens
+                cacheCreationInputTokens,
+                stopSequence
               }
             }
           });
@@ -1918,6 +1925,7 @@ var anthropicMessagesResponseSchema = z7.object({
     ])
   ),
   stop_reason: z7.string().nullish(),
+  stop_sequence: z7.string().nullish(),
   usage: z7.looseObject({
     input_tokens: z7.number(),
     output_tokens: z7.number(),
@@ -2066,7 +2074,10 @@ var anthropicMessagesChunkSchema = z7.discriminatedUnion("type", [
   }),
   z7.object({
     type: z7.literal("message_delta"),
-    delta: z7.object({
+    delta: z7.object({
+      stop_reason: z7.string().nullish(),
+      stop_sequence: z7.string().nullish()
+    }),
     usage: z7.looseObject({
       output_tokens: z7.number(),
       cache_creation_input_tokens: z7.number().nullish()