@ai-sdk/anthropic 3.0.0-beta.94 → 3.0.0-beta.96
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +18 -0
- package/dist/index.js +43 -29
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +43 -29
- package/dist/index.mjs.map +1 -1
- package/dist/internal/index.js +42 -28
- package/dist/internal/index.js.map +1 -1
- package/dist/internal/index.mjs +42 -28
- package/dist/internal/index.mjs.map +1 -1
- package/package.json +3 -3
package/dist/internal/index.js
CHANGED
@@ -1695,6 +1695,9 @@ async function convertToAnthropicMessagesPrompt({
       case "tool": {
         for (let i2 = 0; i2 < content.length; i2++) {
           const part = content[i2];
+          if (part.type === "tool-approval-response") {
+            continue;
+          }
           const isLastPart = i2 === content.length - 1;
           const cacheControl = (_d = validator.getCacheControl(part.providerOptions, {
             type: "tool result part",
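
The new guard above skips "tool-approval-response" parts when a tool message is converted, so only actual tool results are turned into Anthropic tool_result blocks. A minimal TypeScript sketch of the idea, with illustrative part types that only borrow the "type" discriminator from the hunk:

// Illustrative part types; only the "type" discriminator comes from the hunk above.
type ToolResultPart = { type: "tool-result"; toolCallId: string; output: unknown };
type ToolApprovalResponsePart = { type: "tool-approval-response"; approvalId: string; approved: boolean };
type ToolContentPart = ToolResultPart | ToolApprovalResponsePart;

// Approval responses have no Anthropic tool_result equivalent, so they are
// dropped before the request content is built.
function toolPartsForRequest(parts: ToolContentPart[]): ToolResultPart[] {
  return parts.filter((part): part is ToolResultPart => part.type !== "tool-approval-response");
}
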
@@ -2268,11 +2271,10 @@ function mapAnthropicStopReason({
     case "tool_use":
       return isJsonResponseFromTool ? "stop" : "tool-calls";
     case "max_tokens":
-      return "length";
     case "model_context_window_exceeded":
       return "length";
     default:
-      return "unknown";
+      return "other";
   }
 }
 
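
Two behavior changes above: "max_tokens" now falls through to the same "length" branch as "model_context_window_exceeded", and unrecognized stop reasons map to "other" instead of the previous default. A trimmed sketch covering only the cases visible in this hunk (the exported function handles further stop reasons and receives isJsonResponseFromTool as part of an options object):

// Trimmed sketch of the mapping after this change; return values mirror the hunk.
function mapStopReason(
  stopReason: string | null | undefined,
  isJsonResponseFromTool: boolean
): "stop" | "tool-calls" | "length" | "other" {
  switch (stopReason) {
    case "tool_use":
      return isJsonResponseFromTool ? "stop" : "tool-calls";
    case "max_tokens": // now shares the "length" branch below
    case "model_context_window_exceeded":
      return "length";
    default:
      return "other"; // replaces the previous catch-all
  }
}
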
@@ -2688,7 +2690,7 @@ var AnthropicMessagesLanguageModel = class {
     });
   }
   async doGenerate(options) {
-    var _a, _b, _c, _d, _e, _f, _g, _h, _i;
+    var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j;
     const { args, warnings, betas, usesJsonResponseTool, toolNameMapping } = await this.getArgs({
       ...options,
       stream: false,
@@ -2997,15 +2999,18 @@ var AnthropicMessagesLanguageModel = class {
     }
     return {
       content,
-      finishReason: mapAnthropicStopReason({
-        finishReason: response.stop_reason,
-        isJsonResponseFromTool
-      }),
+      finishReason: {
+        unified: mapAnthropicStopReason({
+          finishReason: response.stop_reason,
+          isJsonResponseFromTool
+        }),
+        raw: (_c = response.stop_reason) != null ? _c : void 0
+      },
       usage: convertAnthropicMessagesUsage(response.usage),
       request: { body: args },
       response: {
-        id: (_c = response.id) != null ? _c : void 0,
-        modelId: (_d = response.model) != null ? _d : void 0,
+        id: (_d = response.id) != null ? _d : void 0,
+        modelId: (_e = response.model) != null ? _e : void 0,
         headers: responseHeaders,
         body: rawResponse
       },
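
doGenerate now reports the finish reason as an object rather than a bare string: a unified value produced by mapAnthropicStopReason plus the raw stop_reason returned by Anthropic. A sketch of reading it, with the result shape inferred from the compiled output above rather than taken from a published type:

// Shape inferred from the hunk above; treat it as an assumption, not a contract.
type GenerateResult = {
  finishReason: {
    unified: string; // mapped value, e.g. "stop", "length", "tool-calls", "other"
    raw: string | undefined; // Anthropic's stop_reason as received, e.g. "end_turn"
  };
};

function describeFinish(result: GenerateResult): string {
  const { unified, raw } = result.finishReason;
  return raw ? `${unified} (stop_reason: ${raw})` : unified;
}
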
@@ -3013,20 +3018,20 @@ var AnthropicMessagesLanguageModel = class {
       providerMetadata: {
         anthropic: {
           usage: response.usage,
-          cacheCreationInputTokens: (_e = response.usage.cache_creation_input_tokens) != null ? _e : null,
-          stopSequence: (_f = response.stop_sequence) != null ? _f : null,
+          cacheCreationInputTokens: (_f = response.usage.cache_creation_input_tokens) != null ? _f : null,
+          stopSequence: (_g = response.stop_sequence) != null ? _g : null,
           container: response.container ? {
             expiresAt: response.container.expires_at,
             id: response.container.id,
-            skills: (_h = (_g = response.container.skills) == null ? void 0 : _g.map((skill) => ({
+            skills: (_i = (_h = response.container.skills) == null ? void 0 : _h.map((skill) => ({
               type: skill.type,
               skillId: skill.skill_id,
               version: skill.version
-            }))) != null ? _h : null
+            }))) != null ? _i : null
           } : null,
-          contextManagement: (_i = mapAnthropicResponseContextManagement(
+          contextManagement: (_j = mapAnthropicResponseContextManagement(
             response.context_management
-          )) != null ? _i : null
+          )) != null ? _j : null
         }
       }
     };
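
Aside from the renumbered helper variables, this hunk preserves the providerMetadata.anthropic payload: cache creation tokens, stop sequence, the container with its skills list, and context management, each falling back to null. A sketch of consuming it, with the metadata shape inferred from the compiled output and therefore an assumption:

// Metadata shape inferred from the hunk above (assumed, not a published type).
type AnthropicMetadata = {
  cacheCreationInputTokens: number | null;
  stopSequence: string | null;
  container: {
    expiresAt: string;
    id: string;
    skills: Array<{ type: string; skillId: string; version: string }> | null;
  } | null;
  contextManagement: unknown;
};

function summarizeContainer(meta: AnthropicMetadata): string {
  if (meta.container == null) return "no container";
  const skills = meta.container.skills ?? [];
  return `container ${meta.container.id}, ${skills.length} skill(s), expires ${meta.container.expiresAt}`;
}
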
@@ -3057,7 +3062,10 @@ var AnthropicMessagesLanguageModel = class {
       abortSignal: options.abortSignal,
       fetch: this.config.fetch
     });
-    let finishReason = "unknown";
+    let finishReason = {
+      unified: "other",
+      raw: void 0
+    };
     const usage = {
       input_tokens: 0,
       output_tokens: 0,
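
doStream seeds its running finish reason with the same object shape, starting from a neutral placeholder that later message_start and message_delta events overwrite once Anthropic reports a stop_reason. A minimal sketch of that pattern, assuming the shape used throughout this diff:

// Placeholder until the stream reports a stop_reason; shape assumed from this diff.
type StreamFinishReason = { unified: string; raw: string | undefined };

let finishReason: StreamFinishReason = { unified: "other", raw: undefined };

// Hypothetical helper for illustration: overwrite once a raw stop_reason arrives.
function applyStopReason(raw: string, mapToUnified: (value: string) => string): void {
  finishReason = { unified: mapToUnified(raw), raw };
}
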
@@ -3080,7 +3088,7 @@ var AnthropicMessagesLanguageModel = class {
           controller.enqueue({ type: "stream-start", warnings });
         },
         transform(chunk, controller) {
-          var _a2, _b2, _c, _d, _e, _f, _g, _h, _i, _j, _k;
+          var _a2, _b2, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l;
           if (options.includeRawChunks) {
             controller.enqueue({ type: "raw", rawValue: chunk.rawValue });
           }
@@ -3563,10 +3571,13 @@ var AnthropicMessagesLanguageModel = class {
              };
            }
            if (value.message.stop_reason != null) {
-              finishReason = mapAnthropicStopReason({
-                finishReason: value.message.stop_reason,
-                isJsonResponseFromTool
-              });
+              finishReason = {
+                unified: mapAnthropicStopReason({
+                  finishReason: value.message.stop_reason,
+                  isJsonResponseFromTool
+                }),
+                raw: value.message.stop_reason
+              };
            }
            controller.enqueue({
              type: "response-metadata",
@@ -3617,19 +3628,22 @@ var AnthropicMessagesLanguageModel = class {
            }
            case "message_delta": {
              usage.output_tokens = value.usage.output_tokens;
-              finishReason = mapAnthropicStopReason({
-                finishReason: value.delta.stop_reason,
-                isJsonResponseFromTool
-              });
-              stopSequence = (_i = value.delta.stop_sequence) != null ? _i : null;
+              finishReason = {
+                unified: mapAnthropicStopReason({
+                  finishReason: value.delta.stop_reason,
+                  isJsonResponseFromTool
+                }),
+                raw: (_i = value.delta.stop_reason) != null ? _i : void 0
+              };
+              stopSequence = (_j = value.delta.stop_sequence) != null ? _j : null;
              container = value.delta.container != null ? {
                expiresAt: value.delta.container.expires_at,
                id: value.delta.container.id,
-                skills: (_k = (_j = value.delta.container.skills) == null ? void 0 : _j.map((skill) => ({
+                skills: (_l = (_k = value.delta.container.skills) == null ? void 0 : _k.map((skill) => ({
                  type: skill.type,
                  skillId: skill.skill_id,
                  version: skill.version
-                }))) != null ? _k : null
+                }))) != null ? _l : null
              } : null;
              if (value.delta.context_management) {
                contextManagement = mapAnthropicResponseContextManagement(
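
In the message_delta branch, each delta overwrites the accumulated finish reason (keeping both the mapped value and the raw stop_reason), refreshes the stop sequence, and re-maps the container's skills from snake_case to camelCase, falling back to null when absent. A reducer-style sketch of that accumulation, with the event and return types assumed for illustration:

// Event shape assumed for illustration; field names follow the hunk above.
type Skill = { type: string; skill_id: string; version: string };
type MessageDelta = {
  stop_reason?: string | null;
  stop_sequence?: string | null;
  container?: { expires_at: string; id: string; skills?: Skill[] } | null;
};

function applyMessageDelta(
  delta: MessageDelta,
  mapStop: (raw: string | null | undefined) => string
) {
  return {
    finishReason: { unified: mapStop(delta.stop_reason), raw: delta.stop_reason ?? undefined },
    stopSequence: delta.stop_sequence ?? null,
    container: delta.container != null ? {
      expiresAt: delta.container.expires_at,
      id: delta.container.id,
      // skills re-mapped to camelCase; null when the delta carries none
      skills: delta.container.skills?.map((s) => ({ type: s.type, skillId: s.skill_id, version: s.version })) ?? null
    } : null
  };
}
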