@ai-sdk/anthropic 2.1.0-beta.9 → 3.0.0-beta.14
This diff shows the content of publicly released package versions as they appear in their respective public registries. It is provided for informational purposes only.
- package/CHANGELOG.md +42 -0
- package/dist/index.js +19 -8
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +19 -8
- package/dist/index.mjs.map +1 -1
- package/dist/internal/index.js +18 -7
- package/dist/internal/index.js.map +1 -1
- package/dist/internal/index.mjs +18 -7
- package/dist/internal/index.mjs.map +1 -1
- package/package.json +4 -4
package/dist/index.mjs CHANGED

@@ -10,7 +10,7 @@ import {
 } from "@ai-sdk/provider-utils";
 
 // src/version.ts
-var VERSION = true ? "2.1.0-beta.9" : "0.0.0-test";
+var VERSION = true ? "3.0.0-beta.14" : "0.0.0-test";
 
 // src/anthropic-messages-language-model.ts
 import {
@@ -450,7 +450,7 @@ async function convertToAnthropicMessagesPrompt({
   sendReasoning,
   warnings
 }) {
-  var _a, _b, _c, _d, _e;
+  var _a, _b, _c, _d, _e, _f;
   const betas = /* @__PURE__ */ new Set();
   const blocks = groupIntoBlocks(prompt);
   let system = void 0;
@@ -626,6 +626,9 @@ async function convertToAnthropicMessagesPrompt({
               case "error-text":
                 contentValue = output.value;
                 break;
+              case "execution-denied":
+                contentValue = (_e = output.reason) != null ? _e : "Tool execution denied.";
+                break;
               case "json":
               case "error-json":
               default:
@@ -660,7 +663,7 @@ async function convertToAnthropicMessagesPrompt({
         for (let k = 0; k < content.length; k++) {
           const part = content[k];
           const isLastContentPart = k === content.length - 1;
-          const cacheControl = (_e = getCacheControl(part.providerOptions)) != null ? _e : isLastContentPart ? getCacheControl(message.providerOptions) : void 0;
+          const cacheControl = (_f = getCacheControl(part.providerOptions)) != null ? _f : isLastContentPart ? getCacheControl(message.providerOptions) : void 0;
           switch (part.type) {
             case "text": {
               anthropicContent.push({
@@ -1197,7 +1200,7 @@ var AnthropicMessagesLanguageModel = class {
     });
   }
   async doGenerate(options) {
-    var _a, _b, _c, _d, _e;
+    var _a, _b, _c, _d, _e, _f;
    const { args, warnings, betas, usesJsonResponseTool } = await this.getArgs(options);
    const citationDocuments = this.extractCitationDocuments(options.prompt);
    const {
@@ -1424,7 +1427,8 @@ var AnthropicMessagesLanguageModel = class {
       providerMetadata: {
         anthropic: {
           usage: response.usage,
-          cacheCreationInputTokens: (_e = response.usage.cache_creation_input_tokens) != null ? _e : null
+          cacheCreationInputTokens: (_e = response.usage.cache_creation_input_tokens) != null ? _e : null,
+          stopSequence: (_f = response.stop_sequence) != null ? _f : null
         }
       }
     };
@@ -1453,6 +1457,7 @@ var AnthropicMessagesLanguageModel = class {
     const contentBlocks = {};
     let rawUsage = void 0;
     let cacheCreationInputTokens = null;
+    let stopSequence = null;
     let blockType = void 0;
     const generateId3 = this.generateId;
     return {
@@ -1462,7 +1467,7 @@ var AnthropicMessagesLanguageModel = class {
         controller.enqueue({ type: "stream-start", warnings });
       },
       transform(chunk, controller) {
-        var _a, _b, _c, _d, _e, _f, _g;
+        var _a, _b, _c, _d, _e, _f, _g, _h;
         if (options.includeRawChunks) {
           controller.enqueue({ type: "raw", rawValue: chunk.rawValue });
         }
@@ -1800,6 +1805,7 @@ var AnthropicMessagesLanguageModel = class {
               finishReason: value.delta.stop_reason,
               isJsonResponseFromTool: usesJsonResponseTool
             });
+            stopSequence = (_h = value.delta.stop_sequence) != null ? _h : null;
             rawUsage = {
               ...rawUsage,
               ...value.usage
@@ -1814,7 +1820,8 @@ var AnthropicMessagesLanguageModel = class {
               providerMetadata: {
                 anthropic: {
                   usage: rawUsage != null ? rawUsage : null,
-                  cacheCreationInputTokens
+                  cacheCreationInputTokens,
+                  stopSequence
                 }
               }
             });
@@ -1932,6 +1939,7 @@ var anthropicMessagesResponseSchema = z7.object({
     ])
   ),
   stop_reason: z7.string().nullish(),
+  stop_sequence: z7.string().nullish(),
   usage: z7.looseObject({
     input_tokens: z7.number(),
     output_tokens: z7.number(),
@@ -2080,7 +2088,10 @@ var anthropicMessagesChunkSchema = z7.discriminatedUnion("type", [
   }),
   z7.object({
     type: z7.literal("message_delta"),
-    delta: z7.object({ stop_reason: z7.string().nullish() }),
+    delta: z7.object({
+      stop_reason: z7.string().nullish(),
+      stop_sequence: z7.string().nullish()
+    }),
     usage: z7.looseObject({
       output_tokens: z7.number(),
       cache_creation_input_tokens: z7.number().nullish()