@ai-sdk/anthropic 2.0.0-canary.1 → 2.0.0-canary.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -27,7 +27,7 @@ module.exports = __toCommonJS(internal_exports);
 
  // src/anthropic-messages-language-model.ts
  var import_provider3 = require("@ai-sdk/provider");
- var import_provider_utils3 = require("@ai-sdk/provider-utils");
+ var import_provider_utils2 = require("@ai-sdk/provider-utils");
  var import_zod2 = require("zod");
 
  // src/anthropic-error.ts
@@ -47,13 +47,15 @@ var anthropicFailedResponseHandler = (0, import_provider_utils.createJsonErrorRe
 
  // src/anthropic-prepare-tools.ts
  var import_provider = require("@ai-sdk/provider");
- function prepareTools(mode) {
- var _a;
- const tools = ((_a = mode.tools) == null ? void 0 : _a.length) ? mode.tools : void 0;
+ function prepareTools({
+ tools,
+ toolChoice
+ }) {
+ tools = (tools == null ? void 0 : tools.length) ? tools : void 0;
  const toolWarnings = [];
  const betas = /* @__PURE__ */ new Set();
  if (tools == null) {
- return { tools: void 0, tool_choice: void 0, toolWarnings, betas };
+ return { tools: void 0, toolChoice: void 0, toolWarnings, betas };
  }
  const anthropicTools2 = [];
  for (const tool of tools) {
@@ -125,11 +127,10 @@ function prepareTools(mode) {
  break;
  }
  }
- const toolChoice = mode.toolChoice;
  if (toolChoice == null) {
  return {
  tools: anthropicTools2,
- tool_choice: void 0,
+ toolChoice: void 0,
  toolWarnings,
  betas
  };
@@ -139,30 +140,30 @@ function prepareTools(mode) {
  case "auto":
  return {
  tools: anthropicTools2,
- tool_choice: { type: "auto" },
+ toolChoice: { type: "auto" },
  toolWarnings,
  betas
  };
  case "required":
  return {
  tools: anthropicTools2,
- tool_choice: { type: "any" },
+ toolChoice: { type: "any" },
  toolWarnings,
  betas
  };
  case "none":
- return { tools: void 0, tool_choice: void 0, toolWarnings, betas };
+ return { tools: void 0, toolChoice: void 0, toolWarnings, betas };
  case "tool":
  return {
  tools: anthropicTools2,
- tool_choice: { type: "tool", name: toolChoice.toolName },
+ toolChoice: { type: "tool", name: toolChoice.toolName },
  toolWarnings,
  betas
  };
  default: {
  const _exhaustiveCheck = type;
  throw new import_provider.UnsupportedFunctionalityError({
- functionality: `Unsupported tool choice type: ${_exhaustiveCheck}`
+ functionality: `tool choice type: ${_exhaustiveCheck}`
  });
  }
  }
@@ -170,13 +171,12 @@ function prepareTools(mode) {
 
  // src/convert-to-anthropic-messages-prompt.ts
  var import_provider2 = require("@ai-sdk/provider");
- var import_provider_utils2 = require("@ai-sdk/provider-utils");
  function convertToAnthropicMessagesPrompt({
  prompt,
  sendReasoning,
  warnings
  }) {
- var _a, _b, _c, _d;
+ var _a, _b, _c;
  const betas = /* @__PURE__ */ new Set();
  const blocks = groupIntoBlocks(prompt);
  let system = void 0;
@@ -198,10 +198,10 @@ function convertToAnthropicMessagesPrompt({
  functionality: "Multiple system messages that are separated by user/assistant messages"
  });
  }
- system = block.messages.map(({ content, providerMetadata }) => ({
+ system = block.messages.map(({ content, providerOptions }) => ({
  type: "text",
  text: content,
- cache_control: getCacheControl(providerMetadata)
+ cache_control: getCacheControl(providerOptions)
  }));
  break;
  }
@@ -214,7 +214,7 @@ function convertToAnthropicMessagesPrompt({
  for (let j = 0; j < content.length; j++) {
  const part = content[j];
  const isLastPart = j === content.length - 1;
- const cacheControl = (_a = getCacheControl(part.providerMetadata)) != null ? _a : isLastPart ? getCacheControl(message.providerMetadata) : void 0;
+ const cacheControl = (_a = getCacheControl(part.providerOptions)) != null ? _a : isLastPart ? getCacheControl(message.providerOptions) : void 0;
  switch (part.type) {
  case "text": {
  anthropicContent.push({
@@ -224,42 +224,39 @@ function convertToAnthropicMessagesPrompt({
  });
  break;
  }
- case "image": {
- anthropicContent.push({
- type: "image",
- source: part.image instanceof URL ? {
- type: "url",
- url: part.image.toString()
- } : {
- type: "base64",
- media_type: (_b = part.mimeType) != null ? _b : "image/jpeg",
- data: (0, import_provider_utils2.convertUint8ArrayToBase64)(part.image)
- },
- cache_control: cacheControl
- });
- break;
- }
  case "file": {
- if (part.data instanceof URL) {
- throw new import_provider2.UnsupportedFunctionalityError({
- functionality: "Image URLs in user messages"
+ if (part.mediaType.startsWith("image/")) {
+ anthropicContent.push({
+ type: "image",
+ source: part.data instanceof URL ? {
+ type: "url",
+ url: part.data.toString()
+ } : {
+ type: "base64",
+ media_type: part.mediaType === "image/*" ? "image/jpeg" : part.mediaType,
+ data: part.data
+ },
+ cache_control: cacheControl
  });
- }
- if (part.mimeType !== "application/pdf") {
+ } else if (part.mediaType === "application/pdf") {
+ betas.add("pdfs-2024-09-25");
+ anthropicContent.push({
+ type: "document",
+ source: part.data instanceof URL ? {
+ type: "url",
+ url: part.data.toString()
+ } : {
+ type: "base64",
+ media_type: "application/pdf",
+ data: part.data
+ },
+ cache_control: cacheControl
+ });
+ } else {
  throw new import_provider2.UnsupportedFunctionalityError({
- functionality: "Non-PDF files in user messages"
+ functionality: `media type: ${part.mediaType}`
  });
  }
- betas.add("pdfs-2024-09-25");
- anthropicContent.push({
- type: "document",
- source: {
- type: "base64",
- media_type: "application/pdf",
- data: part.data
- },
- cache_control: cacheControl
- });
  break;
  }
  }
@@ -270,7 +267,7 @@ function convertToAnthropicMessagesPrompt({
  for (let i2 = 0; i2 < content.length; i2++) {
  const part = content[i2];
  const isLastPart = i2 === content.length - 1;
- const cacheControl = (_c = getCacheControl(part.providerMetadata)) != null ? _c : isLastPart ? getCacheControl(message.providerMetadata) : void 0;
+ const cacheControl = (_b = getCacheControl(part.providerOptions)) != null ? _b : isLastPart ? getCacheControl(message.providerOptions) : void 0;
  const toolResultContent = part.content != null ? part.content.map((part2) => {
  var _a2;
  switch (part2.type) {
@@ -285,7 +282,7 @@ function convertToAnthropicMessagesPrompt({
  type: "image",
  source: {
  type: "base64",
- media_type: (_a2 = part2.mimeType) != null ? _a2 : "image/jpeg",
+ media_type: (_a2 = part2.mediaType) != null ? _a2 : "image/jpeg",
  data: part2.data
  },
  cache_control: void 0
@@ -320,7 +317,7 @@ function convertToAnthropicMessagesPrompt({
  for (let k = 0; k < content.length; k++) {
  const part = content[k];
  const isLastContentPart = k === content.length - 1;
- const cacheControl = (_d = getCacheControl(part.providerMetadata)) != null ? _d : isLastContentPart ? getCacheControl(message.providerMetadata) : void 0;
+ const cacheControl = (_c = getCacheControl(part.providerOptions)) != null ? _c : isLastContentPart ? getCacheControl(message.providerOptions) : void 0;
  switch (part.type) {
  case "text": {
  anthropicContent.push({
@@ -377,7 +374,7 @@ function convertToAnthropicMessagesPrompt({
  }
  default: {
  const _exhaustiveCheck = type;
- throw new Error(`Unsupported type: ${_exhaustiveCheck}`);
+ throw new Error(`content type: ${_exhaustiveCheck}`);
  }
  }
  }
@@ -457,6 +454,9 @@ var AnthropicMessagesLanguageModel = class {
  this.settings = settings;
  this.config = config;
  }
+ supportsUrl(url) {
+ return url.protocol === "https:";
+ }
  get provider() {
  return this.config.provider;
  }
@@ -464,7 +464,6 @@ var AnthropicMessagesLanguageModel = class {
  return this.config.supportsImageUrls;
  }
  async getArgs({
- mode,
  prompt,
  maxTokens = 4096,
  // 4096: max model output tokens TODO update default in v5
@@ -476,10 +475,11 @@ var AnthropicMessagesLanguageModel = class {
  stopSequences,
  responseFormat,
  seed,
- providerMetadata: providerOptions
+ tools,
+ toolChoice,
+ providerOptions
  }) {
  var _a, _b, _c;
- const type = mode.type;
  const warnings = [];
  if (frequencyPenalty != null) {
  warnings.push({
@@ -511,7 +511,7 @@ var AnthropicMessagesLanguageModel = class {
  sendReasoning: (_a = this.settings.sendReasoning) != null ? _a : true,
  warnings
  });
- const anthropicOptions = (0, import_provider_utils3.parseProviderOptions)({
+ const anthropicOptions = (0, import_provider_utils2.parseProviderOptions)({
  provider: "anthropic",
  providerOptions,
  schema: anthropicProviderOptionsSchema
@@ -567,49 +567,28 @@ var AnthropicMessagesLanguageModel = class {
  }
  baseArgs.max_tokens = maxTokens + thinkingBudget;
  }
- switch (type) {
- case "regular": {
- const {
- tools,
- tool_choice,
- toolWarnings,
- betas: toolsBetas
- } = prepareTools(mode);
- return {
- args: { ...baseArgs, tools, tool_choice },
- warnings: [...warnings, ...toolWarnings],
- betas: /* @__PURE__ */ new Set([...messagesBetas, ...toolsBetas])
- };
- }
- case "object-json": {
- throw new import_provider3.UnsupportedFunctionalityError({
- functionality: "json-mode object generation"
- });
- }
- case "object-tool": {
- const { name, description, parameters } = mode.tool;
- return {
- args: {
- ...baseArgs,
- tools: [{ name, description, input_schema: parameters }],
- tool_choice: { type: "tool", name }
- },
- warnings,
- betas: messagesBetas
- };
- }
- default: {
- const _exhaustiveCheck = type;
- throw new Error(`Unsupported type: ${_exhaustiveCheck}`);
- }
- }
+ const {
+ tools: anthropicTools2,
+ toolChoice: anthropicToolChoice,
+ toolWarnings,
+ betas: toolsBetas
+ } = prepareTools({ tools, toolChoice });
+ return {
+ args: {
+ ...baseArgs,
+ tools: anthropicTools2,
+ tool_choice: anthropicToolChoice
+ },
+ warnings: [...warnings, ...toolWarnings],
+ betas: /* @__PURE__ */ new Set([...messagesBetas, ...toolsBetas])
+ };
  }
  async getHeaders({
  betas,
  headers
  }) {
- return (0, import_provider_utils3.combineHeaders)(
- await (0, import_provider_utils3.resolve)(this.config.headers),
+ return (0, import_provider_utils2.combineHeaders)(
+ await (0, import_provider_utils2.resolve)(this.config.headers),
  betas.size > 0 ? { "anthropic-beta": Array.from(betas).join(",") } : {},
  headers
  );
@@ -629,12 +608,12 @@ var AnthropicMessagesLanguageModel = class {
  responseHeaders,
  value: response,
  rawValue: rawResponse
- } = await (0, import_provider_utils3.postJsonToApi)({
+ } = await (0, import_provider_utils2.postJsonToApi)({
  url: this.buildRequestUrl(false),
  headers: await this.getHeaders({ betas, headers: options.headers }),
  body: this.transformRequestBody(args),
  failedResponseHandler: anthropicFailedResponseHandler,
- successfulResponseHandler: (0, import_provider_utils3.createJsonResponseHandler)(
+ successfulResponseHandler: (0, import_provider_utils2.createJsonResponseHandler)(
  anthropicMessagesResponseSchema
  ),
  abortSignal: options.abortSignal,
@@ -704,12 +683,12 @@ var AnthropicMessagesLanguageModel = class {
  async doStream(options) {
  const { args, warnings, betas } = await this.getArgs(options);
  const body = { ...args, stream: true };
- const { responseHeaders, value: response } = await (0, import_provider_utils3.postJsonToApi)({
+ const { responseHeaders, value: response } = await (0, import_provider_utils2.postJsonToApi)({
  url: this.buildRequestUrl(true),
  headers: await this.getHeaders({ betas, headers: options.headers }),
  body: this.transformRequestBody(body),
  failedResponseHandler: anthropicFailedResponseHandler,
- successfulResponseHandler: (0, import_provider_utils3.createEventSourceResponseHandler)(
+ successfulResponseHandler: (0, import_provider_utils2.createEventSourceResponseHandler)(
  anthropicMessagesChunkSchema
  ),
  abortSignal: options.abortSignal,