@ai-sdk/anthropic 2.0.0-canary.1 → 2.0.0-canary.11

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -47,13 +47,15 @@ var anthropicFailedResponseHandler = (0, import_provider_utils.createJsonErrorRe
 
 // src/anthropic-prepare-tools.ts
 var import_provider = require("@ai-sdk/provider");
-function prepareTools(mode) {
-  var _a;
-  const tools = ((_a = mode.tools) == null ? void 0 : _a.length) ? mode.tools : void 0;
+function prepareTools({
+  tools,
+  toolChoice
+}) {
+  tools = (tools == null ? void 0 : tools.length) ? tools : void 0;
   const toolWarnings = [];
   const betas = /* @__PURE__ */ new Set();
   if (tools == null) {
-    return { tools: void 0, tool_choice: void 0, toolWarnings, betas };
+    return { tools: void 0, toolChoice: void 0, toolWarnings, betas };
   }
   const anthropicTools2 = [];
   for (const tool of tools) {
@@ -125,11 +127,10 @@ function prepareTools(mode) {
        break;
    }
  }
-  const toolChoice = mode.toolChoice;
  if (toolChoice == null) {
    return {
      tools: anthropicTools2,
-      tool_choice: void 0,
+      toolChoice: void 0,
      toolWarnings,
      betas
    };
@@ -139,30 +140,30 @@ function prepareTools(mode) {
    case "auto":
      return {
        tools: anthropicTools2,
-        tool_choice: { type: "auto" },
+        toolChoice: { type: "auto" },
        toolWarnings,
        betas
      };
    case "required":
      return {
        tools: anthropicTools2,
-        tool_choice: { type: "any" },
+        toolChoice: { type: "any" },
        toolWarnings,
        betas
      };
    case "none":
-      return { tools: void 0, tool_choice: void 0, toolWarnings, betas };
+      return { tools: void 0, toolChoice: void 0, toolWarnings, betas };
    case "tool":
      return {
        tools: anthropicTools2,
-        tool_choice: { type: "tool", name: toolChoice.toolName },
+        toolChoice: { type: "tool", name: toolChoice.toolName },
        toolWarnings,
        betas
      };
    default: {
      const _exhaustiveCheck = type;
      throw new import_provider.UnsupportedFunctionalityError({
-        functionality: `Unsupported tool choice type: ${_exhaustiveCheck}`
+        functionality: `tool choice type: ${_exhaustiveCheck}`
      });
    }
  }
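Net effect of the three hunks above: prepareTools now takes a destructured { tools, toolChoice } instead of the old mode object, and returns a camelCase toolChoice; the snake_case tool_choice that Anthropic's API expects is only applied later, in getArgs. A minimal sketch of the new call shape, with illustrative values (the tool object's field names follow the v5 spec and are not shown in this hunk):

    const { tools, toolChoice, toolWarnings, betas } = prepareTools({
      tools: [{
        type: "function",
        name: "get_weather",
        description: "Get the weather for a city",
        parameters: { type: "object", properties: { city: { type: "string" } } }
      }],
      toolChoice: { type: "tool", toolName: "get_weather" }
    });
    // toolChoice is now { type: "tool", name: "get_weather" }, Anthropic's wire format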
@@ -171,12 +172,12 @@ function prepareTools(mode) {
 // src/convert-to-anthropic-messages-prompt.ts
 var import_provider2 = require("@ai-sdk/provider");
 var import_provider_utils2 = require("@ai-sdk/provider-utils");
-function convertToAnthropicMessagesPrompt({
+async function convertToAnthropicMessagesPrompt({
   prompt,
   sendReasoning,
   warnings
 }) {
-  var _a, _b, _c, _d;
+  var _a, _b, _c;
   const betas = /* @__PURE__ */ new Set();
   const blocks = groupIntoBlocks(prompt);
   let system = void 0;
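convertToAnthropicMessagesPrompt becomes async because the reasoning hunk below now awaits parseProviderOptions; every caller has to await it, as the getArgs hunk further down does. A sketch of the updated call, assuming the same argument shape:

    const { prompt: messagesPrompt, betas } = await convertToAnthropicMessagesPrompt({
      prompt,              // the standardized language-model prompt
      sendReasoning: true,
      warnings: []
    });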
@@ -198,10 +199,10 @@ function convertToAnthropicMessagesPrompt({
            functionality: "Multiple system messages that are separated by user/assistant messages"
          });
        }
-        system = block.messages.map(({ content, providerMetadata }) => ({
+        system = block.messages.map(({ content, providerOptions }) => ({
          type: "text",
          text: content,
-          cache_control: getCacheControl(providerMetadata)
+          cache_control: getCacheControl(providerOptions)
        }));
        break;
      }
@@ -214,7 +215,7 @@ function convertToAnthropicMessagesPrompt({
        for (let j = 0; j < content.length; j++) {
          const part = content[j];
          const isLastPart = j === content.length - 1;
-          const cacheControl = (_a = getCacheControl(part.providerMetadata)) != null ? _a : isLastPart ? getCacheControl(message.providerMetadata) : void 0;
+          const cacheControl = (_a = getCacheControl(part.providerOptions)) != null ? _a : isLastPart ? getCacheControl(message.providerOptions) : void 0;
          switch (part.type) {
            case "text": {
              anthropicContent.push({
@@ -224,42 +225,39 @@ function convertToAnthropicMessagesPrompt({
              });
              break;
            }
-            case "image": {
-              anthropicContent.push({
-                type: "image",
-                source: part.image instanceof URL ? {
-                  type: "url",
-                  url: part.image.toString()
-                } : {
-                  type: "base64",
-                  media_type: (_b = part.mimeType) != null ? _b : "image/jpeg",
-                  data: (0, import_provider_utils2.convertUint8ArrayToBase64)(part.image)
-                },
-                cache_control: cacheControl
-              });
-              break;
-            }
            case "file": {
-              if (part.data instanceof URL) {
-                throw new import_provider2.UnsupportedFunctionalityError({
-                  functionality: "Image URLs in user messages"
+              if (part.mediaType.startsWith("image/")) {
+                anthropicContent.push({
+                  type: "image",
+                  source: part.data instanceof URL ? {
+                    type: "url",
+                    url: part.data.toString()
+                  } : {
+                    type: "base64",
+                    media_type: part.mediaType === "image/*" ? "image/jpeg" : part.mediaType,
+                    data: (0, import_provider_utils2.convertToBase64)(part.data)
+                  },
+                  cache_control: cacheControl
                });
-              }
-              if (part.mimeType !== "application/pdf") {
+              } else if (part.mediaType === "application/pdf") {
+                betas.add("pdfs-2024-09-25");
+                anthropicContent.push({
+                  type: "document",
+                  source: part.data instanceof URL ? {
+                    type: "url",
+                    url: part.data.toString()
+                  } : {
+                    type: "base64",
+                    media_type: "application/pdf",
+                    data: (0, import_provider_utils2.convertToBase64)(part.data)
+                  },
+                  cache_control: cacheControl
+                });
+              } else {
                throw new import_provider2.UnsupportedFunctionalityError({
-                  functionality: "Non-PDF files in user messages"
+                  functionality: `media type: ${part.mediaType}`
                });
              }
-              betas.add("pdfs-2024-09-25");
-              anthropicContent.push({
-                type: "document",
-                source: {
-                  type: "base64",
-                  media_type: "application/pdf",
-                  data: part.data
-                },
-                cache_control: cacheControl
-              });
              break;
            }
          }
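The dedicated image part type is gone: images now arrive as file parts whose mediaType starts with "image/", and both images and PDFs accept either a URL or binary data (the old code rejected URLs outright). A sketch of the two user-content shapes this branch now handles; the variable names are illustrative:

    const imagePart = {
      type: "file",
      mediaType: "image/png",
      data: new URL("https://example.com/photo.png")  // or a Uint8Array
    };
    const pdfPart = {
      type: "file",
      mediaType: "application/pdf",
      data: pdfBytes  // Uint8Array; URLs are accepted here too now
    };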
@@ -270,7 +268,7 @@ function convertToAnthropicMessagesPrompt({
        for (let i2 = 0; i2 < content.length; i2++) {
          const part = content[i2];
          const isLastPart = i2 === content.length - 1;
-          const cacheControl = (_c = getCacheControl(part.providerMetadata)) != null ? _c : isLastPart ? getCacheControl(message.providerMetadata) : void 0;
+          const cacheControl = (_b = getCacheControl(part.providerOptions)) != null ? _b : isLastPart ? getCacheControl(message.providerOptions) : void 0;
          const toolResultContent = part.content != null ? part.content.map((part2) => {
            var _a2;
            switch (part2.type) {
@@ -285,7 +283,7 @@ function convertToAnthropicMessagesPrompt({
                type: "image",
                source: {
                  type: "base64",
-                  media_type: (_a2 = part2.mimeType) != null ? _a2 : "image/jpeg",
+                  media_type: (_a2 = part2.mediaType) != null ? _a2 : "image/jpeg",
                  data: part2.data
                },
                cache_control: void 0
@@ -320,7 +318,7 @@ function convertToAnthropicMessagesPrompt({
        for (let k = 0; k < content.length; k++) {
          const part = content[k];
          const isLastContentPart = k === content.length - 1;
-          const cacheControl = (_d = getCacheControl(part.providerMetadata)) != null ? _d : isLastContentPart ? getCacheControl(message.providerMetadata) : void 0;
+          const cacheControl = (_c = getCacheControl(part.providerOptions)) != null ? _c : isLastContentPart ? getCacheControl(message.providerOptions) : void 0;
          switch (part.type) {
            case "text": {
              anthropicContent.push({
@@ -337,12 +335,37 @@ function convertToAnthropicMessagesPrompt({
            }
            case "reasoning": {
              if (sendReasoning) {
-                anthropicContent.push({
-                  type: "thinking",
-                  thinking: part.text,
-                  signature: part.signature,
-                  cache_control: cacheControl
+                const reasoningMetadata = await (0, import_provider_utils2.parseProviderOptions)({
+                  provider: "anthropic",
+                  providerOptions: part.providerOptions,
+                  schema: anthropicReasoningMetadataSchema
                });
+                if (reasoningMetadata != null) {
+                  if (reasoningMetadata.signature != null) {
+                    anthropicContent.push({
+                      type: "thinking",
+                      thinking: part.text,
+                      signature: reasoningMetadata.signature,
+                      cache_control: cacheControl
+                    });
+                  } else if (reasoningMetadata.redactedData != null) {
+                    anthropicContent.push({
+                      type: "redacted_thinking",
+                      data: reasoningMetadata.redactedData,
+                      cache_control: cacheControl
+                    });
+                  } else {
+                    warnings.push({
+                      type: "other",
+                      message: "unsupported reasoning metadata"
+                    });
+                  }
+                } else {
+                  warnings.push({
+                    type: "other",
+                    message: "unsupported reasoning metadata"
+                  });
+                }
              } else {
                warnings.push({
                  type: "other",
@@ -351,14 +374,6 @@ function convertToAnthropicMessagesPrompt({
              }
              break;
            }
-            case "redacted-reasoning": {
-              anthropicContent.push({
-                type: "redacted_thinking",
-                data: part.data,
-                cache_control: cacheControl
-              });
-              break;
-            }
            case "tool-call": {
              anthropicContent.push({
                type: "tool_use",
@@ -377,7 +392,7 @@ function convertToAnthropicMessagesPrompt({
            }
            default: {
              const _exhaustiveCheck = type;
-              throw new Error(`Unsupported type: ${_exhaustiveCheck}`);
+              throw new Error(`content type: ${_exhaustiveCheck}`);
            }
          }
        }
@@ -452,21 +467,23 @@ function mapAnthropicStopReason(finishReason) {
 var AnthropicMessagesLanguageModel = class {
   constructor(modelId, settings, config) {
     this.specificationVersion = "v2";
-    this.defaultObjectGenerationMode = "tool";
     this.modelId = modelId;
     this.settings = settings;
     this.config = config;
   }
+  supportsUrl(url) {
+    return url.protocol === "https:";
+  }
   get provider() {
     return this.config.provider;
   }
-  get supportsImageUrls() {
-    return this.config.supportsImageUrls;
+  async getSupportedUrls() {
+    var _a, _b, _c;
+    return (_c = (_b = (_a = this.config).getSupportedUrls) == null ? void 0 : _b.call(_a)) != null ? _c : {};
   }
   async getArgs({
-    mode,
     prompt,
-    maxTokens = 4096,
+    maxOutputTokens = 4096,
     // 4096: max model output tokens TODO update default in v5
     temperature,
     topP,
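defaultObjectGenerationMode disappears together with mode-based object generation (removed from getArgs below), and URL handling splits into a synchronous supportsUrl check plus an async getSupportedUrls map delegated to the provider config. A sketch of what a config hook might return, assuming the v5 convention of media-type patterns mapped to URL regexps (the concrete map is not shown in this diff):

    const config = {
      provider: "anthropic.messages",
      getSupportedUrls: async () => ({
        "image/*": [/^https:\/\//],
        "application/pdf": [/^https:\/\//]
      })
    };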
@@ -476,10 +493,11 @@ var AnthropicMessagesLanguageModel = class {
    stopSequences,
    responseFormat,
    seed,
-    providerMetadata: providerOptions
+    tools,
+    toolChoice,
+    providerOptions
  }) {
    var _a, _b, _c;
-    const type = mode.type;
    const warnings = [];
    if (frequencyPenalty != null) {
      warnings.push({
@@ -506,12 +524,12 @@ var AnthropicMessagesLanguageModel = class {
        details: "JSON response format is not supported."
      });
    }
-    const { prompt: messagesPrompt, betas: messagesBetas } = convertToAnthropicMessagesPrompt({
+    const { prompt: messagesPrompt, betas: messagesBetas } = await convertToAnthropicMessagesPrompt({
      prompt,
      sendReasoning: (_a = this.settings.sendReasoning) != null ? _a : true,
      warnings
    });
-    const anthropicOptions = (0, import_provider_utils3.parseProviderOptions)({
+    const anthropicOptions = await (0, import_provider_utils3.parseProviderOptions)({
      provider: "anthropic",
      providerOptions,
      schema: anthropicProviderOptionsSchema
@@ -522,7 +540,7 @@ var AnthropicMessagesLanguageModel = class {
      // model id:
      model: this.modelId,
      // standardized settings:
-      max_tokens: maxTokens,
+      max_tokens: maxOutputTokens,
      temperature,
      top_k: topK,
      top_p: topP,
@@ -565,44 +583,23 @@ var AnthropicMessagesLanguageModel = class {
        details: "topP is not supported when thinking is enabled"
      });
    }
-      baseArgs.max_tokens = maxTokens + thinkingBudget;
-    }
-    switch (type) {
-      case "regular": {
-        const {
-          tools,
-          tool_choice,
-          toolWarnings,
-          betas: toolsBetas
-        } = prepareTools(mode);
-        return {
-          args: { ...baseArgs, tools, tool_choice },
-          warnings: [...warnings, ...toolWarnings],
-          betas: /* @__PURE__ */ new Set([...messagesBetas, ...toolsBetas])
-        };
-      }
-      case "object-json": {
-        throw new import_provider3.UnsupportedFunctionalityError({
-          functionality: "json-mode object generation"
-        });
-      }
-      case "object-tool": {
-        const { name, description, parameters } = mode.tool;
-        return {
-          args: {
-            ...baseArgs,
-            tools: [{ name, description, input_schema: parameters }],
-            tool_choice: { type: "tool", name }
-          },
-          warnings,
-          betas: messagesBetas
-        };
-      }
-      default: {
-        const _exhaustiveCheck = type;
-        throw new Error(`Unsupported type: ${_exhaustiveCheck}`);
-      }
+      baseArgs.max_tokens = maxOutputTokens + thinkingBudget;
    }
+    const {
+      tools: anthropicTools2,
+      toolChoice: anthropicToolChoice,
+      toolWarnings,
+      betas: toolsBetas
+    } = prepareTools({ tools, toolChoice });
+    return {
+      args: {
+        ...baseArgs,
+        tools: anthropicTools2,
+        tool_choice: anthropicToolChoice
+      },
+      warnings: [...warnings, ...toolWarnings],
+      betas: /* @__PURE__ */ new Set([...messagesBetas, ...toolsBetas])
+    };
  }
  async getHeaders({
    betas,
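With the mode switch gone, every request takes the same path: tools and toolChoice arrive as top-level call options and are mapped by the new prepareTools, while the json and object-tool modes no longer have dedicated branches. A sketch of the new getArgs call options (names taken from the hunks above, values illustrative):

    const { args, warnings, betas } = await model.getArgs({
      prompt,
      maxOutputTokens: 1024,         // renamed from maxTokens
      temperature: 0,
      tools,                         // optional, standardized tool definitions
      toolChoice: { type: "auto" },  // optional
      providerOptions: {}
    });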
@@ -640,56 +637,62 @@ var AnthropicMessagesLanguageModel = class {
      abortSignal: options.abortSignal,
      fetch: this.config.fetch
    });
-    const { messages: rawPrompt, ...rawSettings } = args;
-    let text = "";
-    for (const content of response.content) {
-      if (content.type === "text") {
-        text += content.text;
-      }
-    }
-    let toolCalls = void 0;
-    if (response.content.some((content) => content.type === "tool_use")) {
-      toolCalls = [];
-      for (const content of response.content) {
-        if (content.type === "tool_use") {
-          toolCalls.push({
+    const content = [];
+    for (const part of response.content) {
+      switch (part.type) {
+        case "text": {
+          content.push({ type: "text", text: part.text });
+          break;
+        }
+        case "thinking": {
+          content.push({
+            type: "reasoning",
+            text: part.thinking,
+            providerMetadata: {
+              anthropic: {
+                signature: part.signature
+              }
+            }
+          });
+          break;
+        }
+        case "redacted_thinking": {
+          content.push({
+            type: "reasoning",
+            text: "",
+            providerMetadata: {
+              anthropic: {
+                redactedData: part.data
+              }
+            }
+          });
+          break;
+        }
+        case "tool_use": {
+          content.push({
+            type: "tool-call",
            toolCallType: "function",
-            toolCallId: content.id,
-            toolName: content.name,
-            args: JSON.stringify(content.input)
+            toolCallId: part.id,
+            toolName: part.name,
+            args: JSON.stringify(part.input)
          });
+          break;
        }
      }
    }
-    const reasoning = response.content.filter(
-      (content) => content.type === "redacted_thinking" || content.type === "thinking"
-    ).map(
-      (content) => content.type === "thinking" ? {
-        type: "text",
-        text: content.thinking,
-        signature: content.signature
-      } : {
-        type: "redacted",
-        data: content.data
-      }
-    );
    return {
-      text,
-      reasoning: reasoning.length > 0 ? reasoning : void 0,
-      toolCalls,
+      content,
      finishReason: mapAnthropicStopReason(response.stop_reason),
      usage: {
-        promptTokens: response.usage.input_tokens,
-        completionTokens: response.usage.output_tokens
-      },
-      rawCall: { rawPrompt, rawSettings },
-      rawResponse: {
-        headers: responseHeaders,
-        body: rawResponse
+        inputTokens: response.usage.input_tokens,
+        outputTokens: response.usage.output_tokens
      },
+      request: { body: args },
      response: {
        id: (_a = response.id) != null ? _a : void 0,
-        modelId: (_b = response.model) != null ? _b : void 0
+        modelId: (_b = response.model) != null ? _b : void 0,
+        headers: responseHeaders,
+        body: rawResponse
      },
      warnings,
      providerMetadata: {
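doGenerate no longer returns separate text, reasoning, and toolCalls fields; it returns one ordered content array, with thinking blocks mapped to reasoning parts that carry their signature (or redacted data) under providerMetadata.anthropic, and usage renamed to inputTokens/outputTokens. A consumer sketch:

    const result = await model.doGenerate(options);
    for (const part of result.content) {
      switch (part.type) {
        case "text":
          process.stdout.write(part.text);
          break;
        case "reasoning":
          console.log("reasoning:", part.text, part.providerMetadata);
          break;
        case "tool-call":
          console.log("tool:", part.toolName, part.args);
          break;
      }
    }
    console.log(result.usage);  // { inputTokens, outputTokens }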
@@ -697,8 +700,7 @@ var AnthropicMessagesLanguageModel = class {
          cacheCreationInputTokens: (_c = response.usage.cache_creation_input_tokens) != null ? _c : null,
          cacheReadInputTokens: (_d = response.usage.cache_read_input_tokens) != null ? _d : null
        }
-      },
-      request: { body: JSON.stringify(args) }
+      }
    };
  }
  async doStream(options) {
@@ -715,11 +717,10 @@ var AnthropicMessagesLanguageModel = class {
      abortSignal: options.abortSignal,
      fetch: this.config.fetch
    });
-    const { messages: rawPrompt, ...rawSettings } = args;
    let finishReason = "unknown";
    const usage = {
-      promptTokens: Number.NaN,
-      completionTokens: Number.NaN
+      inputTokens: void 0,
+      outputTokens: void 0
    };
    const toolCallContentBlocks = {};
    let providerMetadata = void 0;
@@ -727,6 +728,9 @@ var AnthropicMessagesLanguageModel = class {
    return {
      stream: response.pipeThrough(
        new TransformStream({
+          start(controller) {
+            controller.enqueue({ type: "stream-start", warnings });
+          },
          transform(chunk, controller) {
            var _a, _b, _c, _d;
            if (!chunk.success) {
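Warnings move out of the doStream return value and into the stream itself: the transform now emits a stream-start part before any model chunks. A consumer sketch (async iteration over a ReadableStream, available on Node 18+):

    const { stream } = await model.doStream(options);
    for await (const part of stream) {
      if (part.type === "stream-start") {
        console.log("warnings:", part.warnings);  // previously the `warnings` field
      }
    }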
@@ -748,9 +752,15 @@ var AnthropicMessagesLanguageModel = class {
              }
              case "redacted_thinking": {
                controller.enqueue({
-                  type: "redacted-reasoning",
-                  data: value.content_block.data
+                  type: "reasoning",
+                  text: "",
+                  providerMetadata: {
+                    anthropic: {
+                      redactedData: value.content_block.data
+                    }
+                  }
                });
+                controller.enqueue({ type: "reasoning-part-finish" });
                return;
              }
              case "tool_use": {
@@ -789,24 +799,30 @@ var AnthropicMessagesLanguageModel = class {
            switch (deltaType) {
              case "text_delta": {
                controller.enqueue({
-                  type: "text-delta",
-                  textDelta: value.delta.text
+                  type: "text",
+                  text: value.delta.text
                });
                return;
              }
              case "thinking_delta": {
                controller.enqueue({
                  type: "reasoning",
-                  textDelta: value.delta.thinking
+                  text: value.delta.thinking
                });
                return;
              }
              case "signature_delta": {
                if (blockType === "thinking") {
                  controller.enqueue({
-                    type: "reasoning-signature",
-                    signature: value.delta.signature
+                    type: "reasoning",
+                    text: "",
+                    providerMetadata: {
+                      anthropic: {
+                        signature: value.delta.signature
+                      }
+                    }
                  });
+                  controller.enqueue({ type: "reasoning-part-finish" });
                }
                return;
              }
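text-delta/textDelta collapse into plain text fields, and the signature is no longer a dedicated reasoning-signature part: it rides on an empty reasoning part's providerMetadata, followed by an explicit reasoning-part-finish. The part sequence for one thinking block now looks roughly like this (values illustrative):

    // { type: "reasoning", text: "Let me think" }
    // { type: "reasoning", text: " about this." }
    // { type: "reasoning", text: "", providerMetadata: { anthropic: { signature: "sig_abc" } } }
    // { type: "reasoning-part-finish" }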
@@ -831,8 +847,8 @@ var AnthropicMessagesLanguageModel = class {
            }
          }
          case "message_start": {
-            usage.promptTokens = value.message.usage.input_tokens;
-            usage.completionTokens = value.message.usage.output_tokens;
+            usage.inputTokens = value.message.usage.input_tokens;
+            usage.outputTokens = value.message.usage.output_tokens;
            providerMetadata = {
              anthropic: {
                cacheCreationInputTokens: (_a = value.message.usage.cache_creation_input_tokens) != null ? _a : null,
@@ -847,7 +863,7 @@ var AnthropicMessagesLanguageModel = class {
            return;
          }
          case "message_delta": {
-            usage.completionTokens = value.usage.output_tokens;
+            usage.outputTokens = value.usage.output_tokens;
            finishReason = mapAnthropicStopReason(value.delta.stop_reason);
            return;
          }
@@ -872,10 +888,8 @@ var AnthropicMessagesLanguageModel = class {
          }
        })
      ),
-      rawCall: { rawPrompt, rawSettings },
-      rawResponse: { headers: responseHeaders },
-      warnings,
-      request: { body: JSON.stringify(body) }
+      request: { body },
+      response: { headers: responseHeaders }
    };
  }
 };
@@ -1002,6 +1016,10 @@ var anthropicProviderOptionsSchema = import_zod2.z.object({
    budgetTokens: import_zod2.z.number().optional()
  }).optional()
 });
+var anthropicReasoningMetadataSchema = import_zod2.z.object({
+  signature: import_zod2.z.string().optional(),
+  redactedData: import_zod2.z.string().optional()
+});
 
 // src/anthropic-tools.ts
 var import_zod3 = require("zod");
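This new schema backs the parseProviderOptions call in the reasoning hunk above; both fields are optional, so an empty object also validates. A quick sketch:

    anthropicReasoningMetadataSchema.parse({ signature: "sig_abc" });   // ok
    anthropicReasoningMetadataSchema.parse({ redactedData: "opaque" }); // ok
    anthropicReasoningMetadataSchema.parse({});                         // ok, both fields optional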