@ai-sdk/anthropic 2.0.0-canary.0 → 2.0.0-canary.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/CHANGELOG.md CHANGED
@@ -1,5 +1,23 @@
 # @ai-sdk/anthropic
 
+## 2.0.0-canary.2
+
+### Patch Changes
+
+- Updated dependencies [c57e248]
+- Updated dependencies [33f4a6a]
+  - @ai-sdk/provider@2.0.0-canary.1
+  - @ai-sdk/provider-utils@3.0.0-canary.2
+
+## 2.0.0-canary.1
+
+### Patch Changes
+
+- Updated dependencies [060370c]
+- Updated dependencies [0c0c0b3]
+- Updated dependencies [63d791d]
+  - @ai-sdk/provider-utils@3.0.0-canary.1
+
 ## 2.0.0-canary.0
 
 ### Major Changes
package/dist/index.d.mts CHANGED
@@ -1,5 +1,5 @@
 import { z } from 'zod';
-import { ProviderV1, LanguageModelV1 } from '@ai-sdk/provider';
+import { ProviderV2, LanguageModelV2 } from '@ai-sdk/provider';
 import { FetchFunction } from '@ai-sdk/provider-utils';
 
 type AnthropicMessagesModelId = 'claude-3-7-sonnet-20250219' | 'claude-3-5-sonnet-latest' | 'claude-3-5-sonnet-20241022' | 'claude-3-5-sonnet-20240620' | 'claude-3-5-haiku-latest' | 'claude-3-5-haiku-20241022' | 'claude-3-opus-latest' | 'claude-3-opus-20240229' | 'claude-3-sonnet-20240229' | 'claude-3-haiku-20240307' | (string & {});
@@ -440,23 +440,23 @@ declare const anthropicTools: {
     computer_20250124: typeof computerTool_20250124;
 };
 
-interface AnthropicProvider extends ProviderV1 {
+interface AnthropicProvider extends ProviderV2 {
     /**
     Creates a model for text generation.
     */
-    (modelId: AnthropicMessagesModelId, settings?: AnthropicMessagesSettings): LanguageModelV1;
+    (modelId: AnthropicMessagesModelId, settings?: AnthropicMessagesSettings): LanguageModelV2;
     /**
     Creates a model for text generation.
     */
-    languageModel(modelId: AnthropicMessagesModelId, settings?: AnthropicMessagesSettings): LanguageModelV1;
+    languageModel(modelId: AnthropicMessagesModelId, settings?: AnthropicMessagesSettings): LanguageModelV2;
     /**
     @deprecated Use `.languageModel()` instead.
     */
-    chat(modelId: AnthropicMessagesModelId, settings?: AnthropicMessagesSettings): LanguageModelV1;
+    chat(modelId: AnthropicMessagesModelId, settings?: AnthropicMessagesSettings): LanguageModelV2;
     /**
     @deprecated Use `.languageModel()` instead.
     */
-    messages(modelId: AnthropicMessagesModelId, settings?: AnthropicMessagesSettings): LanguageModelV1;
+    messages(modelId: AnthropicMessagesModelId, settings?: AnthropicMessagesSettings): LanguageModelV2;
     /**
     Anthropic-specific computer use tool.
     */
package/dist/index.d.ts CHANGED
@@ -1,5 +1,5 @@
 import { z } from 'zod';
-import { ProviderV1, LanguageModelV1 } from '@ai-sdk/provider';
+import { ProviderV2, LanguageModelV2 } from '@ai-sdk/provider';
 import { FetchFunction } from '@ai-sdk/provider-utils';
 
 type AnthropicMessagesModelId = 'claude-3-7-sonnet-20250219' | 'claude-3-5-sonnet-latest' | 'claude-3-5-sonnet-20241022' | 'claude-3-5-sonnet-20240620' | 'claude-3-5-haiku-latest' | 'claude-3-5-haiku-20241022' | 'claude-3-opus-latest' | 'claude-3-opus-20240229' | 'claude-3-sonnet-20240229' | 'claude-3-haiku-20240307' | (string & {});
@@ -440,23 +440,23 @@ declare const anthropicTools: {
     computer_20250124: typeof computerTool_20250124;
 };
 
-interface AnthropicProvider extends ProviderV1 {
+interface AnthropicProvider extends ProviderV2 {
    /**
    Creates a model for text generation.
    */
-    (modelId: AnthropicMessagesModelId, settings?: AnthropicMessagesSettings): LanguageModelV1;
+    (modelId: AnthropicMessagesModelId, settings?: AnthropicMessagesSettings): LanguageModelV2;
    /**
    Creates a model for text generation.
    */
-    languageModel(modelId: AnthropicMessagesModelId, settings?: AnthropicMessagesSettings): LanguageModelV1;
+    languageModel(modelId: AnthropicMessagesModelId, settings?: AnthropicMessagesSettings): LanguageModelV2;
    /**
    @deprecated Use `.languageModel()` instead.
    */
-    chat(modelId: AnthropicMessagesModelId, settings?: AnthropicMessagesSettings): LanguageModelV1;
+    chat(modelId: AnthropicMessagesModelId, settings?: AnthropicMessagesSettings): LanguageModelV2;
    /**
    @deprecated Use `.languageModel()` instead.
    */
-    messages(modelId: AnthropicMessagesModelId, settings?: AnthropicMessagesSettings): LanguageModelV1;
+    messages(modelId: AnthropicMessagesModelId, settings?: AnthropicMessagesSettings): LanguageModelV2;
    /**
    Anthropic-specific computer use tool.
    */
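
The typings above declare the provider's call signatures. A minimal consumer-side sketch (not part of the diff; it assumes the standard createAnthropic options and an ANTHROPIC_API_KEY environment variable) of obtaining a language model through this interface:

```ts
import { createAnthropic } from '@ai-sdk/anthropic';

// Hypothetical setup; if apiKey is omitted, the provider reads ANTHROPIC_API_KEY.
const anthropic = createAnthropic({ apiKey: process.env.ANTHROPIC_API_KEY });

// Both forms are declared above and return a LanguageModelV2;
// .chat() and .messages() remain available but are marked @deprecated.
const model = anthropic('claude-3-5-sonnet-latest');
const sameModel = anthropic.languageModel('claude-3-5-sonnet-latest');
```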
package/dist/index.js CHANGED
@@ -51,13 +51,15 @@ var anthropicFailedResponseHandler = (0, import_provider_utils.createJsonErrorRe
 
 // src/anthropic-prepare-tools.ts
 var import_provider = require("@ai-sdk/provider");
-function prepareTools(mode) {
-  var _a;
-  const tools = ((_a = mode.tools) == null ? void 0 : _a.length) ? mode.tools : void 0;
+function prepareTools({
+  tools,
+  toolChoice
+}) {
+  tools = (tools == null ? void 0 : tools.length) ? tools : void 0;
   const toolWarnings = [];
   const betas = /* @__PURE__ */ new Set();
   if (tools == null) {
-    return { tools: void 0, tool_choice: void 0, toolWarnings, betas };
+    return { tools: void 0, toolChoice: void 0, toolWarnings, betas };
   }
   const anthropicTools2 = [];
   for (const tool of tools) {
@@ -129,11 +131,10 @@ function prepareTools(mode) {
       break;
     }
   }
-  const toolChoice = mode.toolChoice;
   if (toolChoice == null) {
     return {
       tools: anthropicTools2,
-      tool_choice: void 0,
+      toolChoice: void 0,
       toolWarnings,
       betas
     };
@@ -143,23 +144,23 @@ function prepareTools(mode) {
     case "auto":
       return {
         tools: anthropicTools2,
-        tool_choice: { type: "auto" },
+        toolChoice: { type: "auto" },
         toolWarnings,
         betas
       };
     case "required":
       return {
         tools: anthropicTools2,
-        tool_choice: { type: "any" },
+        toolChoice: { type: "any" },
         toolWarnings,
         betas
       };
     case "none":
-      return { tools: void 0, tool_choice: void 0, toolWarnings, betas };
+      return { tools: void 0, toolChoice: void 0, toolWarnings, betas };
     case "tool":
       return {
         tools: anthropicTools2,
-        tool_choice: { type: "tool", name: toolChoice.toolName },
+        toolChoice: { type: "tool", name: toolChoice.toolName },
         toolWarnings,
         betas
       };
@@ -202,10 +203,10 @@ function convertToAnthropicMessagesPrompt({
           functionality: "Multiple system messages that are separated by user/assistant messages"
         });
       }
-      system = block.messages.map(({ content, providerMetadata }) => ({
+      system = block.messages.map(({ content, providerOptions }) => ({
         type: "text",
         text: content,
-        cache_control: getCacheControl(providerMetadata)
+        cache_control: getCacheControl(providerOptions)
       }));
       break;
     }
@@ -218,7 +219,7 @@ function convertToAnthropicMessagesPrompt({
         for (let j = 0; j < content.length; j++) {
           const part = content[j];
           const isLastPart = j === content.length - 1;
-          const cacheControl = (_a = getCacheControl(part.providerMetadata)) != null ? _a : isLastPart ? getCacheControl(message.providerMetadata) : void 0;
+          const cacheControl = (_a = getCacheControl(part.providerOptions)) != null ? _a : isLastPart ? getCacheControl(message.providerOptions) : void 0;
           switch (part.type) {
             case "text": {
               anthropicContent.push({
@@ -274,7 +275,7 @@ function convertToAnthropicMessagesPrompt({
         for (let i2 = 0; i2 < content.length; i2++) {
           const part = content[i2];
           const isLastPart = i2 === content.length - 1;
-          const cacheControl = (_c = getCacheControl(part.providerMetadata)) != null ? _c : isLastPart ? getCacheControl(message.providerMetadata) : void 0;
+          const cacheControl = (_c = getCacheControl(part.providerOptions)) != null ? _c : isLastPart ? getCacheControl(message.providerOptions) : void 0;
           const toolResultContent = part.content != null ? part.content.map((part2) => {
             var _a2;
             switch (part2.type) {
@@ -324,7 +325,7 @@ function convertToAnthropicMessagesPrompt({
         for (let k = 0; k < content.length; k++) {
           const part = content[k];
           const isLastContentPart = k === content.length - 1;
-          const cacheControl = (_d = getCacheControl(part.providerMetadata)) != null ? _d : isLastContentPart ? getCacheControl(message.providerMetadata) : void 0;
+          const cacheControl = (_d = getCacheControl(part.providerOptions)) != null ? _d : isLastContentPart ? getCacheControl(message.providerOptions) : void 0;
           switch (part.type) {
             case "text": {
               anthropicContent.push({
@@ -455,7 +456,7 @@ function mapAnthropicStopReason(finishReason) {
 // src/anthropic-messages-language-model.ts
 var AnthropicMessagesLanguageModel = class {
   constructor(modelId, settings, config) {
-    this.specificationVersion = "v1";
+    this.specificationVersion = "v2";
     this.defaultObjectGenerationMode = "tool";
     this.modelId = modelId;
     this.settings = settings;
@@ -468,7 +469,6 @@ var AnthropicMessagesLanguageModel = class {
     return this.config.supportsImageUrls;
   }
   async getArgs({
-    mode,
     prompt,
     maxTokens = 4096,
     // 4096: max model output tokens TODO update default in v5
@@ -480,10 +480,11 @@ var AnthropicMessagesLanguageModel = class {
     stopSequences,
     responseFormat,
     seed,
-    providerMetadata: providerOptions
+    tools,
+    toolChoice,
+    providerOptions
   }) {
     var _a, _b, _c;
-    const type = mode.type;
     const warnings = [];
     if (frequencyPenalty != null) {
       warnings.push({
@@ -571,42 +572,21 @@ var AnthropicMessagesLanguageModel = class {
       }
       baseArgs.max_tokens = maxTokens + thinkingBudget;
     }
-    switch (type) {
-      case "regular": {
-        const {
-          tools,
-          tool_choice,
-          toolWarnings,
-          betas: toolsBetas
-        } = prepareTools(mode);
-        return {
-          args: { ...baseArgs, tools, tool_choice },
-          warnings: [...warnings, ...toolWarnings],
-          betas: /* @__PURE__ */ new Set([...messagesBetas, ...toolsBetas])
-        };
-      }
-      case "object-json": {
-        throw new import_provider3.UnsupportedFunctionalityError({
-          functionality: "json-mode object generation"
-        });
-      }
-      case "object-tool": {
-        const { name, description, parameters } = mode.tool;
-        return {
-          args: {
-            ...baseArgs,
-            tools: [{ name, description, input_schema: parameters }],
-            tool_choice: { type: "tool", name }
-          },
-          warnings,
-          betas: messagesBetas
-        };
-      }
-      default: {
-        const _exhaustiveCheck = type;
-        throw new Error(`Unsupported type: ${_exhaustiveCheck}`);
-      }
-    }
+    const {
+      tools: anthropicTools2,
+      toolChoice: anthropicToolChoice,
+      toolWarnings,
+      betas: toolsBetas
+    } = prepareTools({ tools, toolChoice });
+    return {
+      args: {
+        ...baseArgs,
+        tools: anthropicTools2,
+        tool_choice: anthropicToolChoice
+      },
+      warnings: [...warnings, ...toolWarnings],
+      betas: /* @__PURE__ */ new Set([...messagesBetas, ...toolsBetas])
+    };
   }
   async getHeaders({
     betas,
@@ -1188,6 +1168,9 @@ function createAnthropic(options = {}) {
   provider.textEmbeddingModel = (modelId) => {
     throw new import_provider4.NoSuchModelError({ modelId, modelType: "textEmbeddingModel" });
   };
+  provider.imageModel = (modelId) => {
+    throw new import_provider4.NoSuchModelError({ modelId, modelType: "imageModel" });
+  };
   provider.tools = anthropicTools;
   return provider;
 }
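
A short hypothetical call-site sketch (not part of the diff) of the imageModel stub added above, which mirrors the existing textEmbeddingModel stub:

```ts
import { createAnthropic } from '@ai-sdk/anthropic';
import { NoSuchModelError } from '@ai-sdk/provider';

const anthropic = createAnthropic();

try {
  // Anthropic exposes no image models, so this stub always throws.
  anthropic.imageModel('example-image-model');
} catch (error) {
  console.error(error instanceof NoSuchModelError); // true
}
```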