@ai-sdk/anthropic 3.0.0-beta.78 → 3.0.0-beta.80

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.mjs CHANGED
@@ -11,7 +11,7 @@ import {
 } from "@ai-sdk/provider-utils";
 
 // src/version.ts
-var VERSION = true ? "3.0.0-beta.78" : "0.0.0-test";
+var VERSION = true ? "3.0.0-beta.80" : "0.0.0-test";
 
 // src/anthropic-messages-language-model.ts
 import {
@@ -51,6 +51,29 @@ var anthropicFailedResponseHandler = createJsonErrorResponseHandler({
   errorToMessage: (data) => data.error.message
 });
 
+// src/convert-anthropic-messages-usage.ts
+function convertAnthropicMessagesUsage(usage) {
+  var _a, _b;
+  const inputTokens = usage.input_tokens;
+  const outputTokens = usage.output_tokens;
+  const cacheCreationTokens = (_a = usage.cache_creation_input_tokens) != null ? _a : 0;
+  const cacheReadTokens = (_b = usage.cache_read_input_tokens) != null ? _b : 0;
+  return {
+    inputTokens: {
+      total: inputTokens + cacheCreationTokens + cacheReadTokens,
+      noCache: inputTokens,
+      cacheRead: cacheReadTokens,
+      cacheWrite: cacheCreationTokens
+    },
+    outputTokens: {
+      total: outputTokens,
+      text: void 0,
+      reasoning: void 0
+    },
+    raw: usage
+  };
+}
+
 // src/anthropic-messages-api.ts
 import { lazySchema as lazySchema2, zodSchema as zodSchema2 } from "@ai-sdk/provider-utils";
 import { z as z2 } from "zod/v4";
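Note: a minimal sketch (not part of the published diff) of what the new converter returns, assuming an Anthropic usage payload that includes the cache token fields:

  // Hypothetical usage payload mirroring the fields read by convertAnthropicMessagesUsage
  const usage = {
    input_tokens: 10,
    output_tokens: 25,
    cache_creation_input_tokens: 100,
    cache_read_input_tokens: 400
  };
  convertAnthropicMessagesUsage(usage);
  // => {
  //   inputTokens: { total: 510, noCache: 10, cacheRead: 400, cacheWrite: 100 },
  //   outputTokens: { total: 25, text: undefined, reasoning: undefined },
  //   raw: { input_tokens: 10, output_tokens: 25, cache_creation_input_tokens: 100, cache_read_input_tokens: 400 }
  // }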
@@ -882,7 +905,7 @@ var webSearch_20250305OutputSchema = lazySchema4(
     z5.array(
       z5.object({
         url: z5.string(),
-        title: z5.string(),
+        title: z5.string().nullable(),
         pageAge: z5.string().nullable(),
         encryptedContent: z5.string(),
         type: z5.literal("web_search_result")
@@ -931,7 +954,7 @@ var webFetch_20250910OutputSchema = lazySchema5(
       url: z6.string(),
       content: z6.object({
         type: z6.literal("document"),
-        title: z6.string(),
+        title: z6.string().nullable(),
         citations: z6.object({ enabled: z6.boolean() }).optional(),
         source: z6.union([
           z6.object({
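Note: both tool output schemas above now accept a null title, so code that consumed these results as plain strings needs a guard. A small sketch (the result object below is hypothetical, shaped only from the schema fields visible in the hunks):

  // Hypothetical web search result after validation against the updated schema
  const result = {
    url: "https://example.com/article",
    title: null, // previously always a string, now possibly null
    pageAge: null,
    encryptedContent: "...",
    type: "web_search_result"
  };
  const displayTitle = result.title ?? result.url; // fall back to the URL when title is null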
@@ -2514,7 +2537,7 @@ var AnthropicMessagesLanguageModel = class {
     });
   }
   async doGenerate(options) {
-    var _a, _b, _c, _d, _e, _f, _g, _h, _i;
+    var _a, _b, _c, _d, _e, _f, _g, _h;
     const { args, warnings, betas, usesJsonResponseTool, toolNameMapping } = await this.getArgs({
       ...options,
       stream: false,
@@ -2813,16 +2836,11 @@ var AnthropicMessagesLanguageModel = class {
         finishReason: response.stop_reason,
         isJsonResponseFromTool
       }),
-      usage: {
-        inputTokens: response.usage.input_tokens,
-        outputTokens: response.usage.output_tokens,
-        totalTokens: response.usage.input_tokens + response.usage.output_tokens,
-        cachedInputTokens: (_b = response.usage.cache_read_input_tokens) != null ? _b : void 0
-      },
+      usage: convertAnthropicMessagesUsage(response.usage),
       request: { body: args },
       response: {
-        id: (_c = response.id) != null ? _c : void 0,
-        modelId: (_d = response.model) != null ? _d : void 0,
+        id: (_b = response.id) != null ? _b : void 0,
+        modelId: (_c = response.model) != null ? _c : void 0,
         headers: responseHeaders,
         body: rawResponse
       },
@@ -2830,20 +2848,20 @@ var AnthropicMessagesLanguageModel = class {
       providerMetadata: {
         anthropic: {
           usage: response.usage,
-          cacheCreationInputTokens: (_e = response.usage.cache_creation_input_tokens) != null ? _e : null,
-          stopSequence: (_f = response.stop_sequence) != null ? _f : null,
+          cacheCreationInputTokens: (_d = response.usage.cache_creation_input_tokens) != null ? _d : null,
+          stopSequence: (_e = response.stop_sequence) != null ? _e : null,
           container: response.container ? {
             expiresAt: response.container.expires_at,
             id: response.container.id,
-            skills: (_h = (_g = response.container.skills) == null ? void 0 : _g.map((skill) => ({
+            skills: (_g = (_f = response.container.skills) == null ? void 0 : _f.map((skill) => ({
               type: skill.type,
               skillId: skill.skill_id,
               version: skill.version
-            }))) != null ? _h : null
+            }))) != null ? _g : null
           } : null,
-          contextManagement: (_i = mapAnthropicResponseContextManagement(
+          contextManagement: (_h = mapAnthropicResponseContextManagement(
             response.context_management
-          )) != null ? _i : null
+          )) != null ? _h : null
         }
       }
     };
@@ -2876,9 +2894,10 @@ var AnthropicMessagesLanguageModel = class {
     });
     let finishReason = "unknown";
     const usage = {
-      inputTokens: void 0,
-      outputTokens: void 0,
-      totalTokens: void 0
+      input_tokens: 0,
+      output_tokens: 0,
+      cache_creation_input_tokens: 0,
+      cache_read_input_tokens: 0
     };
     const contentBlocks = {};
     const mcpToolCalls = {};
@@ -2896,7 +2915,7 @@ var AnthropicMessagesLanguageModel = class {
         controller.enqueue({ type: "stream-start", warnings });
       },
       transform(chunk, controller) {
-        var _a2, _b2, _c, _d, _e, _f, _g, _h, _i, _j;
+        var _a2, _b2, _c, _d, _e, _f, _g, _h, _i;
         if (options.includeRawChunks) {
          controller.enqueue({ type: "raw", rawValue: chunk.rawValue });
         }
@@ -3335,35 +3354,35 @@ var AnthropicMessagesLanguageModel = class {
            }
          }
          case "message_start": {
-            usage.inputTokens = value.message.usage.input_tokens;
-            usage.cachedInputTokens = (_b2 = value.message.usage.cache_read_input_tokens) != null ? _b2 : void 0;
+            usage.input_tokens = value.message.usage.input_tokens;
+            usage.cache_read_input_tokens = (_b2 = value.message.usage.cache_read_input_tokens) != null ? _b2 : 0;
+            usage.cache_creation_input_tokens = (_c = value.message.usage.cache_creation_input_tokens) != null ? _c : 0;
            rawUsage = {
              ...value.message.usage
            };
-            cacheCreationInputTokens = (_c = value.message.usage.cache_creation_input_tokens) != null ? _c : null;
+            cacheCreationInputTokens = (_d = value.message.usage.cache_creation_input_tokens) != null ? _d : null;
            controller.enqueue({
              type: "response-metadata",
-              id: (_d = value.message.id) != null ? _d : void 0,
-              modelId: (_e = value.message.model) != null ? _e : void 0
+              id: (_e = value.message.id) != null ? _e : void 0,
+              modelId: (_f = value.message.model) != null ? _f : void 0
            });
            return;
          }
          case "message_delta": {
-            usage.outputTokens = value.usage.output_tokens;
-            usage.totalTokens = ((_f = usage.inputTokens) != null ? _f : 0) + ((_g = value.usage.output_tokens) != null ? _g : 0);
+            usage.output_tokens = value.usage.output_tokens;
            finishReason = mapAnthropicStopReason({
              finishReason: value.delta.stop_reason,
              isJsonResponseFromTool
            });
-            stopSequence = (_h = value.delta.stop_sequence) != null ? _h : null;
+            stopSequence = (_g = value.delta.stop_sequence) != null ? _g : null;
            container = value.delta.container != null ? {
              expiresAt: value.delta.container.expires_at,
              id: value.delta.container.id,
-              skills: (_j = (_i = value.delta.container.skills) == null ? void 0 : _i.map((skill) => ({
+              skills: (_i = (_h = value.delta.container.skills) == null ? void 0 : _h.map((skill) => ({
                type: skill.type,
                skillId: skill.skill_id,
                version: skill.version
-              }))) != null ? _j : null
+              }))) != null ? _i : null
            } : null;
            if (value.delta.context_management) {
              contextManagement = mapAnthropicResponseContextManagement(
@@ -3380,7 +3399,7 @@ var AnthropicMessagesLanguageModel = class {
            controller.enqueue({
              type: "finish",
              finishReason,
-              usage,
+              usage: convertAnthropicMessagesUsage(usage),
              providerMetadata: {
                anthropic: {
                  usage: rawUsage != null ? rawUsage : null,
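Note: taken together, the streaming changes above switch the accumulator from the SDK-shaped { inputTokens, outputTokens, totalTokens } object to the raw Anthropic field names, which are filled in from the stream events and converted once at the end. A condensed sketch of that flow (event payloads abbreviated, not a literal excerpt from the bundle):

  // message_start carries the input-side counts, including cache tokens
  usage.input_tokens = value.message.usage.input_tokens;
  usage.cache_read_input_tokens = value.message.usage.cache_read_input_tokens ?? 0;
  usage.cache_creation_input_tokens = value.message.usage.cache_creation_input_tokens ?? 0;

  // message_delta carries the output-side count
  usage.output_tokens = value.usage.output_tokens;

  // the finish part converts the accumulated raw counts into the structured shape
  controller.enqueue({
    type: "finish",
    finishReason,
    usage: convertAnthropicMessagesUsage(usage)
  });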