@ai-sdk/anthropic 3.0.0-beta.94 → 3.0.0-beta.96
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +18 -0
- package/dist/index.js +43 -29
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +43 -29
- package/dist/index.mjs.map +1 -1
- package/dist/internal/index.js +42 -28
- package/dist/internal/index.js.map +1 -1
- package/dist/internal/index.mjs +42 -28
- package/dist/internal/index.mjs.map +1 -1
- package/package.json +3 -3
package/CHANGELOG.md
CHANGED
@@ -1,5 +1,23 @@
 # @ai-sdk/anthropic
 
+## 3.0.0-beta.96
+
+### Patch Changes
+
+- 2625a04: feat(openai); update spec for mcp approval
+- Updated dependencies [2625a04]
+  - @ai-sdk/provider@3.0.0-beta.31
+  - @ai-sdk/provider-utils@4.0.0-beta.58
+
+## 3.0.0-beta.95
+
+### Patch Changes
+
+- cbf52cd: feat: expose raw finish reason
+- Updated dependencies [cbf52cd]
+  - @ai-sdk/provider@3.0.0-beta.30
+  - @ai-sdk/provider-utils@4.0.0-beta.57
+
 ## 3.0.0-beta.94
 
 ### Patch Changes
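The two changesets above show up in the `dist` diffs below: cbf52cd (beta.95) changes the model's reported finish reason into an object that carries both the SDK-normalized value and Anthropic's raw `stop_reason`, and the approval-related work surfaces as a new guard that skips `tool-approval-response` parts in the prompt converter. A minimal sketch of the new finish-reason shape, with type names invented here for illustration (the corresponding types presumably live in `@ai-sdk/provider`, not in this package):

```ts
// Illustrative only — these names are not the package's exports, just the
// shape visible in the compiled code below.
type UnifiedFinishReason = 'stop' | 'length' | 'tool-calls' | 'other'; // values produced by mapAnthropicStopReason

interface ModelFinishReason {
  unified: UnifiedFinishReason; // normalized across providers
  raw?: string; // Anthropic's stop_reason (e.g. "end_turn", "max_tokens"), passed through unchanged
}
```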
package/dist/index.js
CHANGED
@@ -32,7 +32,7 @@ var import_provider4 = require("@ai-sdk/provider");
 var import_provider_utils22 = require("@ai-sdk/provider-utils");
 
 // src/version.ts
-var VERSION = true ? "3.0.0-beta.94" : "0.0.0-test";
+var VERSION = true ? "3.0.0-beta.96" : "0.0.0-test";
 
 // src/anthropic-messages-language-model.ts
 var import_provider3 = require("@ai-sdk/provider");
@@ -1703,6 +1703,9 @@ async function convertToAnthropicMessagesPrompt({
       case "tool": {
         for (let i2 = 0; i2 < content.length; i2++) {
           const part = content[i2];
+          if (part.type === "tool-approval-response") {
+            continue;
+          }
           const isLastPart = i2 === content.length - 1;
           const cacheControl = (_d = validator.getCacheControl(part.providerOptions, {
             type: "tool result part",
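The added guard skips `tool-approval-response` parts when tool messages are converted for the Anthropic Messages API, so only the remaining tool-result content is forwarded. A minimal sketch of that filtering step (helper name and part type are illustrative; how approval responses are consumed elsewhere is not shown in this diff):

```ts
// Hypothetical helper mirroring the new guard above.
interface ToolMessagePart {
  type: string; // e.g. "tool-result" or "tool-approval-response"
}

function partsToConvert(content: ToolMessagePart[]): ToolMessagePart[] {
  // Approval responses are not sent to the API; everything else proceeds
  // through the normal tool-result conversion.
  return content.filter((part) => part.type !== 'tool-approval-response');
}
```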
@@ -2276,11 +2279,10 @@ function mapAnthropicStopReason({
     case "tool_use":
       return isJsonResponseFromTool ? "stop" : "tool-calls";
     case "max_tokens":
-      return "length";
     case "model_context_window_exceeded":
       return "length";
     default:
-      return "
+      return "other";
   }
 }
 
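Read as source rather than compiled output, the updated mapping is roughly the sketch below. Only the `max_tokens` fall-through and the `"other"` default are visible in this hunk; the earlier cases are assumptions based on the surrounding context lines:

```ts
// Rough reconstruction for readability; not the package's actual source.
function mapStopReasonSketch({
  finishReason,
  isJsonResponseFromTool,
}: {
  finishReason: string | null | undefined;
  isJsonResponseFromTool?: boolean;
}): 'stop' | 'length' | 'tool-calls' | 'other' {
  switch (finishReason) {
    case 'end_turn':        // assumed, not shown in this hunk
    case 'stop_sequence':   // assumed, not shown in this hunk
      return 'stop';
    case 'tool_use':
      return isJsonResponseFromTool ? 'stop' : 'tool-calls';
    case 'max_tokens':      // now falls through to the shared "length" branch
    case 'model_context_window_exceeded':
      return 'length';
    default:
      return 'other';
  }
}
```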
@@ -2696,7 +2698,7 @@ var AnthropicMessagesLanguageModel = class {
     });
   }
   async doGenerate(options) {
-    var _a, _b, _c, _d, _e, _f, _g, _h, _i;
+    var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j;
     const { args, warnings, betas, usesJsonResponseTool, toolNameMapping } = await this.getArgs({
       ...options,
       stream: false,
@@ -3005,15 +3007,18 @@ var AnthropicMessagesLanguageModel = class {
     }
     return {
       content,
-      finishReason:
-
-
-
+      finishReason: {
+        unified: mapAnthropicStopReason({
+          finishReason: response.stop_reason,
+          isJsonResponseFromTool
+        }),
+        raw: (_c = response.stop_reason) != null ? _c : void 0
+      },
       usage: convertAnthropicMessagesUsage(response.usage),
       request: { body: args },
       response: {
-        id: (
-        modelId: (
+        id: (_d = response.id) != null ? _d : void 0,
+        modelId: (_e = response.model) != null ? _e : void 0,
         headers: responseHeaders,
         body: rawResponse
       },
@@ -3021,20 +3026,20 @@ var AnthropicMessagesLanguageModel = class {
       providerMetadata: {
         anthropic: {
           usage: response.usage,
-          cacheCreationInputTokens: (
-          stopSequence: (
+          cacheCreationInputTokens: (_f = response.usage.cache_creation_input_tokens) != null ? _f : null,
+          stopSequence: (_g = response.stop_sequence) != null ? _g : null,
           container: response.container ? {
             expiresAt: response.container.expires_at,
             id: response.container.id,
-            skills: (
+            skills: (_i = (_h = response.container.skills) == null ? void 0 : _h.map((skill) => ({
              type: skill.type,
              skillId: skill.skill_id,
              version: skill.version
-            }))) != null ?
+            }))) != null ? _i : null
           } : null,
-          contextManagement: (
+          contextManagement: (_j = mapAnthropicResponseContextManagement(
            response.context_management
-          )) != null ?
+          )) != null ? _j : null
         }
       }
     };
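The sigil patterns above (`(_f = x) != null ? _f : null`) are what `x ?? null` compiles to; read back into plain TypeScript, the `providerMetadata.anthropic` object built in `doGenerate` is roughly the following sketch (response shapes simplified, `contextManagement` omitted):

```ts
// Readability paraphrase of the compiled code above; not the package's source.
interface AnthropicResponseLike {
  usage: { cache_creation_input_tokens?: number | null } & Record<string, unknown>;
  stop_sequence?: string | null;
  container?: {
    expires_at: string;
    id: string;
    skills?: Array<{ type: string; skill_id: string; version: string }>;
  } | null;
}

function anthropicProviderMetadata(response: AnthropicResponseLike) {
  return {
    usage: response.usage,
    cacheCreationInputTokens: response.usage.cache_creation_input_tokens ?? null,
    stopSequence: response.stop_sequence ?? null,
    container: response.container
      ? {
          expiresAt: response.container.expires_at,
          id: response.container.id,
          skills:
            response.container.skills?.map((skill) => ({
              type: skill.type,
              skillId: skill.skill_id,
              version: skill.version,
            })) ?? null,
        }
      : null,
  };
}
```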
@@ -3065,7 +3070,10 @@ var AnthropicMessagesLanguageModel = class {
       abortSignal: options.abortSignal,
       fetch: this.config.fetch
     });
-    let finishReason =
+    let finishReason = {
+      unified: "other",
+      raw: void 0
+    };
     const usage = {
       input_tokens: 0,
       output_tokens: 0,
@@ -3088,7 +3096,7 @@ var AnthropicMessagesLanguageModel = class {
           controller.enqueue({ type: "stream-start", warnings });
         },
         transform(chunk, controller) {
-          var _a2, _b2, _c, _d, _e, _f, _g, _h, _i, _j, _k;
+          var _a2, _b2, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l;
           if (options.includeRawChunks) {
             controller.enqueue({ type: "raw", rawValue: chunk.rawValue });
           }
@@ -3571,10 +3579,13 @@ var AnthropicMessagesLanguageModel = class {
               };
             }
             if (value.message.stop_reason != null) {
-              finishReason =
-
-
-
+              finishReason = {
+                unified: mapAnthropicStopReason({
+                  finishReason: value.message.stop_reason,
+                  isJsonResponseFromTool
+                }),
+                raw: value.message.stop_reason
+              };
             }
             controller.enqueue({
               type: "response-metadata",
@@ -3625,19 +3636,22 @@ var AnthropicMessagesLanguageModel = class {
             }
             case "message_delta": {
               usage.output_tokens = value.usage.output_tokens;
-              finishReason =
-
-
-
-
+              finishReason = {
+                unified: mapAnthropicStopReason({
+                  finishReason: value.delta.stop_reason,
+                  isJsonResponseFromTool
+                }),
+                raw: (_i = value.delta.stop_reason) != null ? _i : void 0
+              };
+              stopSequence = (_j = value.delta.stop_sequence) != null ? _j : null;
               container = value.delta.container != null ? {
                 expiresAt: value.delta.container.expires_at,
                 id: value.delta.container.id,
-                skills: (
+                skills: (_l = (_k = value.delta.container.skills) == null ? void 0 : _k.map((skill) => ({
                   type: skill.type,
                   skillId: skill.skill_id,
                   version: skill.version
-                }))) != null ?
+                }))) != null ? _l : null
               } : null;
               if (value.delta.context_management) {
                 contextManagement = mapAnthropicResponseContextManagement(
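In the streaming path the same structured finish reason is kept as mutable state: it starts as `{ unified: "other", raw: undefined }` and is replaced when a `message_start` event carries a non-null `stop_reason`, or when a `message_delta` arrives (which overwrites it unconditionally), as shown in the last two hunks. A small sketch of the guarded update, with the mapping function passed in so the snippet stays self-contained (shapes are illustrative):

```ts
// Sketch of the message_start update; the message_delta branch performs the
// same replacement without the null check.
type StreamFinishReason = {
  unified: 'stop' | 'length' | 'tool-calls' | 'other';
  raw?: string;
};

function applyStopReason(
  current: StreamFinishReason,
  stopReason: string | null | undefined,
  mapStopReason: (raw: string) => StreamFinishReason['unified'],
): StreamFinishReason {
  if (stopReason == null) {
    return current; // keep the previous value (initially { unified: "other" })
  }
  return { unified: mapStopReason(stopReason), raw: stopReason };
}
```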