@ai-sdk/anthropic 1.2.12 → 2.0.0-alpha.10

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,16 +1,18 @@
  // src/anthropic-messages-language-model.ts
  import {
+ APICallError,
  UnsupportedFunctionalityError as UnsupportedFunctionalityError3
  } from "@ai-sdk/provider";
  import {
  combineHeaders,
  createEventSourceResponseHandler,
  createJsonResponseHandler,
- parseProviderOptions,
+ generateId,
+ parseProviderOptions as parseProviderOptions2,
  postJsonToApi,
  resolve
  } from "@ai-sdk/provider-utils";
- import { z as z2 } from "zod";
+ import { z as z3 } from "zod";

  // src/anthropic-error.ts
  import { createJsonErrorResponseHandler } from "@ai-sdk/provider-utils";
@@ -27,20 +29,76 @@ var anthropicFailedResponseHandler = createJsonErrorResponseHandler({
  errorToMessage: (data) => data.error.message
  });

+ // src/anthropic-messages-options.ts
+ import { z as z2 } from "zod";
+ var webSearchLocationSchema = z2.object({
+ type: z2.literal("approximate"),
+ city: z2.string().optional(),
+ region: z2.string().optional(),
+ country: z2.string(),
+ timezone: z2.string().optional()
+ });
+ var anthropicProviderOptions = z2.object({
+ /**
+ Include reasoning content in requests sent to the model. Defaults to `true`.
+
+ If you are experiencing issues with the model handling requests involving
+ */
+ sendReasoning: z2.boolean().optional(),
+ thinking: z2.object({
+ type: z2.union([z2.literal("enabled"), z2.literal("disabled")]),
+ budgetTokens: z2.number().optional()
+ }).optional(),
+ /**
+ * Web search tool configuration for Claude models that support it.
+ * When provided, automatically adds the web search tool to the request.
+ */
+ webSearch: z2.object({
+ /**
+ * Limit the number of searches per request (optional)
+ * Defaults to 5 if not specified
+ */
+ maxUses: z2.number().min(1).max(20).optional(),
+ /**
+ * Only include results from these domains (optional)
+ * Cannot be used with blockedDomains
+ */
+ allowedDomains: z2.array(z2.string()).optional(),
+ /**
+ * Never include results from these domains (optional)
+ * Cannot be used with allowedDomains
+ */
+ blockedDomains: z2.array(z2.string()).optional(),
+ /**
+ * Localize search results based on user location (optional)
+ */
+ userLocation: webSearchLocationSchema.optional()
+ }).optional()
+ });
+
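The new `anthropicProviderOptions` schema means thinking, reasoning forwarding, and web search are now configured through provider options rather than model settings. A minimal sketch, assuming the AI SDK 5 alpha `generateText` call shape and that these options are passed under the `anthropic` key (the model id and values below are only examples; names mirror the schema above):

```ts
import { generateText } from "ai";
import { anthropic } from "@ai-sdk/anthropic";

// Sketch: option names mirror the anthropicProviderOptions zod schema above.
const result = await generateText({
  model: anthropic("claude-3-7-sonnet-20250219"), // example model id
  prompt: "What changed in the latest release?",
  providerOptions: {
    anthropic: {
      sendReasoning: true, // forward reasoning content to the model (defaults to true)
      thinking: { type: "enabled", budgetTokens: 4096 },
      webSearch: {
        maxUses: 3, // schema allows 1-20
        allowedDomains: ["vercel.com"], // cannot be combined with blockedDomains
        userLocation: {
          type: "approximate",
          country: "US",
          timezone: "America/New_York",
        },
      },
    },
  },
});
```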
  // src/anthropic-prepare-tools.ts
  import {
  UnsupportedFunctionalityError
  } from "@ai-sdk/provider";
- function prepareTools(mode) {
- var _a;
- const tools = ((_a = mode.tools) == null ? void 0 : _a.length) ? mode.tools : void 0;
+ function isWebSearchTool(tool) {
+ return typeof tool === "object" && tool !== null && "type" in tool && tool.type === "web_search_20250305";
+ }
+ function prepareTools({
+ tools,
+ toolChoice
+ }) {
+ tools = (tools == null ? void 0 : tools.length) ? tools : void 0;
  const toolWarnings = [];
  const betas = /* @__PURE__ */ new Set();
  if (tools == null) {
- return { tools: void 0, tool_choice: void 0, toolWarnings, betas };
+ return { tools: void 0, toolChoice: void 0, toolWarnings, betas };
  }
  const anthropicTools2 = [];
  for (const tool of tools) {
+ if (isWebSearchTool(tool)) {
+ anthropicTools2.push(tool);
+ continue;
+ }
  switch (tool.type) {
  case "function":
  anthropicTools2.push({
@@ -109,11 +167,10 @@ function prepareTools(mode) {
  break;
  }
  }
- const toolChoice = mode.toolChoice;
  if (toolChoice == null) {
  return {
  tools: anthropicTools2,
- tool_choice: void 0,
+ toolChoice: void 0,
  toolWarnings,
  betas
  };
@@ -123,30 +180,30 @@ function prepareTools(mode) {
  case "auto":
  return {
  tools: anthropicTools2,
- tool_choice: { type: "auto" },
+ toolChoice: { type: "auto" },
  toolWarnings,
  betas
  };
  case "required":
  return {
  tools: anthropicTools2,
- tool_choice: { type: "any" },
+ toolChoice: { type: "any" },
  toolWarnings,
  betas
  };
  case "none":
- return { tools: void 0, tool_choice: void 0, toolWarnings, betas };
+ return { tools: void 0, toolChoice: void 0, toolWarnings, betas };
  case "tool":
  return {
  tools: anthropicTools2,
- tool_choice: { type: "tool", name: toolChoice.toolName },
+ toolChoice: { type: "tool", name: toolChoice.toolName },
  toolWarnings,
  betas
  };
  default: {
  const _exhaustiveCheck = type;
  throw new UnsupportedFunctionalityError({
- functionality: `Unsupported tool choice type: ${_exhaustiveCheck}`
+ functionality: `tool choice type: ${_exhaustiveCheck}`
  });
  }
  }
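For reference, the renamed `toolChoice` result maps onto Anthropic's `tool_choice` values exactly as the old `tool_choice` field did. A condensed sketch of the mapping the switch above implements (types are illustrative, not exported by the package):

```ts
// Sketch of the tool-choice mapping applied by prepareTools above.
type SdkToolChoice =
  | { type: "auto" }
  | { type: "required" }
  | { type: "none" }
  | { type: "tool"; toolName: string };

function toAnthropicToolChoice(choice: SdkToolChoice) {
  switch (choice.type) {
    case "auto":
      return { type: "auto" } as const;
    case "required":
      return { type: "any" } as const; // "required" becomes Anthropic's "any"
    case "none":
      return undefined; // tools are dropped entirely in this case
    case "tool":
      return { type: "tool", name: choice.toolName } as const;
  }
}
```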
@@ -156,13 +213,13 @@ function prepareTools(mode) {
  import {
  UnsupportedFunctionalityError as UnsupportedFunctionalityError2
  } from "@ai-sdk/provider";
- import { convertUint8ArrayToBase64 } from "@ai-sdk/provider-utils";
- function convertToAnthropicMessagesPrompt({
+ import { convertToBase64, parseProviderOptions } from "@ai-sdk/provider-utils";
+ async function convertToAnthropicMessagesPrompt({
  prompt,
  sendReasoning,
  warnings
  }) {
- var _a, _b, _c, _d;
+ var _a, _b, _c;
  const betas = /* @__PURE__ */ new Set();
  const blocks = groupIntoBlocks(prompt);
  let system = void 0;
@@ -184,10 +241,10 @@ function convertToAnthropicMessagesPrompt({
  functionality: "Multiple system messages that are separated by user/assistant messages"
  });
  }
- system = block.messages.map(({ content, providerMetadata }) => ({
+ system = block.messages.map(({ content, providerOptions }) => ({
  type: "text",
  text: content,
- cache_control: getCacheControl(providerMetadata)
+ cache_control: getCacheControl(providerOptions)
  }));
  break;
  }
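The cache-control lookup now reads from `providerOptions` instead of `providerMetadata`, so message-level cache hints move with it. A minimal sketch of a cached system message, assuming `getCacheControl` still accepts a `cacheControl` object under the `anthropic` key as it did in 1.x (key name and model id are assumptions for illustration):

```ts
import { generateText } from "ai";
import { anthropic } from "@ai-sdk/anthropic";

const result = await generateText({
  model: anthropic("claude-3-5-sonnet-20241022"), // example model id
  messages: [
    {
      role: "system",
      content: "Long, reusable instructions that are worth caching...",
      // was providerMetadata in 1.x; read via getCacheControl(providerOptions) above
      providerOptions: {
        anthropic: { cacheControl: { type: "ephemeral" } },
      },
    },
    { role: "user", content: "Summarize the instructions." },
  ],
});
```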
@@ -200,7 +257,7 @@ function convertToAnthropicMessagesPrompt({
  for (let j = 0; j < content.length; j++) {
  const part = content[j];
  const isLastPart = j === content.length - 1;
- const cacheControl = (_a = getCacheControl(part.providerMetadata)) != null ? _a : isLastPart ? getCacheControl(message.providerMetadata) : void 0;
+ const cacheControl = (_a = getCacheControl(part.providerOptions)) != null ? _a : isLastPart ? getCacheControl(message.providerOptions) : void 0;
  switch (part.type) {
  case "text": {
  anthropicContent.push({
@@ -210,40 +267,39 @@ function convertToAnthropicMessagesPrompt({
  });
  break;
  }
- case "image": {
- anthropicContent.push({
- type: "image",
- source: part.image instanceof URL ? {
- type: "url",
- url: part.image.toString()
- } : {
- type: "base64",
- media_type: (_b = part.mimeType) != null ? _b : "image/jpeg",
- data: convertUint8ArrayToBase64(part.image)
- },
- cache_control: cacheControl
- });
- break;
- }
  case "file": {
- if (part.mimeType !== "application/pdf") {
+ if (part.mediaType.startsWith("image/")) {
+ anthropicContent.push({
+ type: "image",
+ source: part.data instanceof URL ? {
+ type: "url",
+ url: part.data.toString()
+ } : {
+ type: "base64",
+ media_type: part.mediaType === "image/*" ? "image/jpeg" : part.mediaType,
+ data: convertToBase64(part.data)
+ },
+ cache_control: cacheControl
+ });
+ } else if (part.mediaType === "application/pdf") {
+ betas.add("pdfs-2024-09-25");
+ anthropicContent.push({
+ type: "document",
+ source: part.data instanceof URL ? {
+ type: "url",
+ url: part.data.toString()
+ } : {
+ type: "base64",
+ media_type: "application/pdf",
+ data: convertToBase64(part.data)
+ },
+ cache_control: cacheControl
+ });
+ } else {
  throw new UnsupportedFunctionalityError2({
- functionality: "Non-PDF files in user messages"
+ functionality: `media type: ${part.mediaType}`
  });
  }
- betas.add("pdfs-2024-09-25");
- anthropicContent.push({
- type: "document",
- source: part.data instanceof URL ? {
- type: "url",
- url: part.data.toString()
- } : {
- type: "base64",
- media_type: "application/pdf",
- data: part.data
- },
- cache_control: cacheControl
- });
  break;
  }
  }
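Image parts are gone from the internal prompt: both images and PDFs now arrive as `file` parts that carry a `mediaType` (previously `mimeType`) and a `data` payload that may be a URL. A sketch of the part shapes this converter now expects (types are illustrative, not imported from the package):

```ts
// Illustrative shapes only; the real types live in @ai-sdk/provider (LanguageModelV2 prompt).
type FilePart = {
  type: "file";
  mediaType: string; // e.g. "image/png", "image/*" (mapped to image/jpeg above), "application/pdf"
  data: URL | Uint8Array | string; // URLs become { type: "url" } sources, bytes are base64-encoded
};

const userContent: Array<{ type: "text"; text: string } | FilePart> = [
  { type: "text", text: "What is in this image, and what does the attached PDF say?" },
  { type: "file", mediaType: "image/png", data: new URL("https://example.com/chart.png") },
  { type: "file", mediaType: "application/pdf", data: new Uint8Array([/* pdf bytes */]) },
];
```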
@@ -254,7 +310,7 @@ function convertToAnthropicMessagesPrompt({
254
310
  for (let i2 = 0; i2 < content.length; i2++) {
255
311
  const part = content[i2];
256
312
  const isLastPart = i2 === content.length - 1;
257
- const cacheControl = (_c = getCacheControl(part.providerMetadata)) != null ? _c : isLastPart ? getCacheControl(message.providerMetadata) : void 0;
313
+ const cacheControl = (_b = getCacheControl(part.providerOptions)) != null ? _b : isLastPart ? getCacheControl(message.providerOptions) : void 0;
258
314
  const toolResultContent = part.content != null ? part.content.map((part2) => {
259
315
  var _a2;
260
316
  switch (part2.type) {
@@ -269,7 +325,7 @@ function convertToAnthropicMessagesPrompt({
269
325
  type: "image",
270
326
  source: {
271
327
  type: "base64",
272
- media_type: (_a2 = part2.mimeType) != null ? _a2 : "image/jpeg",
328
+ media_type: (_a2 = part2.mediaType) != null ? _a2 : "image/jpeg",
273
329
  data: part2.data
274
330
  },
275
331
  cache_control: void 0
@@ -304,7 +360,7 @@ function convertToAnthropicMessagesPrompt({
304
360
  for (let k = 0; k < content.length; k++) {
305
361
  const part = content[k];
306
362
  const isLastContentPart = k === content.length - 1;
307
- const cacheControl = (_d = getCacheControl(part.providerMetadata)) != null ? _d : isLastContentPart ? getCacheControl(message.providerMetadata) : void 0;
363
+ const cacheControl = (_c = getCacheControl(part.providerOptions)) != null ? _c : isLastContentPart ? getCacheControl(message.providerOptions) : void 0;
308
364
  switch (part.type) {
309
365
  case "text": {
310
366
  anthropicContent.push({
@@ -321,12 +377,37 @@ function convertToAnthropicMessagesPrompt({
321
377
  }
322
378
  case "reasoning": {
323
379
  if (sendReasoning) {
324
- anthropicContent.push({
325
- type: "thinking",
326
- thinking: part.text,
327
- signature: part.signature,
328
- cache_control: cacheControl
380
+ const reasoningMetadata = await parseProviderOptions({
381
+ provider: "anthropic",
382
+ providerOptions: part.providerOptions,
383
+ schema: anthropicReasoningMetadataSchema
329
384
  });
385
+ if (reasoningMetadata != null) {
386
+ if (reasoningMetadata.signature != null) {
387
+ anthropicContent.push({
388
+ type: "thinking",
389
+ thinking: part.text,
390
+ signature: reasoningMetadata.signature,
391
+ cache_control: cacheControl
392
+ });
393
+ } else if (reasoningMetadata.redactedData != null) {
394
+ anthropicContent.push({
395
+ type: "redacted_thinking",
396
+ data: reasoningMetadata.redactedData,
397
+ cache_control: cacheControl
398
+ });
399
+ } else {
400
+ warnings.push({
401
+ type: "other",
402
+ message: "unsupported reasoning metadata"
403
+ });
404
+ }
405
+ } else {
406
+ warnings.push({
407
+ type: "other",
408
+ message: "unsupported reasoning metadata"
409
+ });
410
+ }
330
411
  } else {
331
412
  warnings.push({
332
413
  type: "other",
@@ -335,14 +416,6 @@ function convertToAnthropicMessagesPrompt({
335
416
  }
336
417
  break;
337
418
  }
338
- case "redacted-reasoning": {
339
- anthropicContent.push({
340
- type: "redacted_thinking",
341
- data: part.data,
342
- cache_control: cacheControl
343
- });
344
- break;
345
- }
346
419
  case "tool-call": {
347
420
  anthropicContent.push({
348
421
  type: "tool_use",
@@ -361,7 +434,7 @@ function convertToAnthropicMessagesPrompt({
361
434
  }
362
435
  default: {
363
436
  const _exhaustiveCheck = type;
364
- throw new Error(`Unsupported type: ${_exhaustiveCheck}`);
437
+ throw new Error(`content type: ${_exhaustiveCheck}`);
365
438
  }
366
439
  }
367
440
  }
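Redacted reasoning is no longer a separate part type; a `reasoning` part now carries either a `signature` or `redactedData` under `providerOptions.anthropic` (validated against `anthropicReasoningMetadataSchema` further down in this diff). A sketch of the two assistant part shapes this branch accepts (values are placeholders):

```ts
// Illustrative assistant reasoning parts, as the converter above now reads them.
const signedReasoning = {
  type: "reasoning" as const,
  text: "Chain of thought returned by the model...",
  providerOptions: {
    anthropic: { signature: "sig_..." }, // becomes a "thinking" block
  },
};

const redactedReasoning = {
  type: "reasoning" as const,
  text: "", // redacted thinking has no visible text
  providerOptions: {
    anthropic: { redactedData: "opaque-redacted-payload" }, // becomes "redacted_thinking"
  },
};
// Parts with neither field produce an "unsupported reasoning metadata" warning instead.
```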
@@ -418,13 +491,16 @@ function groupIntoBlocks(prompt) {
418
491
  }
419
492
 
420
493
  // src/map-anthropic-stop-reason.ts
421
- function mapAnthropicStopReason(finishReason) {
494
+ function mapAnthropicStopReason({
495
+ finishReason,
496
+ isJsonResponseFromTool
497
+ }) {
422
498
  switch (finishReason) {
423
499
  case "end_turn":
424
500
  case "stop_sequence":
425
501
  return "stop";
426
502
  case "tool_use":
427
- return "tool-calls";
503
+ return isJsonResponseFromTool ? "stop" : "tool-calls";
428
504
  case "max_tokens":
429
505
  return "length";
430
506
  default:
@@ -434,12 +510,12 @@ function mapAnthropicStopReason(finishReason) {
434
510
 
435
511
  // src/anthropic-messages-language-model.ts
436
512
  var AnthropicMessagesLanguageModel = class {
437
- constructor(modelId, settings, config) {
438
- this.specificationVersion = "v1";
439
- this.defaultObjectGenerationMode = "tool";
513
+ constructor(modelId, config) {
514
+ this.specificationVersion = "v2";
515
+ var _a;
440
516
  this.modelId = modelId;
441
- this.settings = settings;
442
517
  this.config = config;
518
+ this.generateId = (_a = config.generateId) != null ? _a : generateId;
443
519
  }
444
520
  supportsUrl(url) {
445
521
  return url.protocol === "https:";
@@ -447,13 +523,13 @@ var AnthropicMessagesLanguageModel = class {
447
523
  get provider() {
448
524
  return this.config.provider;
449
525
  }
450
- get supportsImageUrls() {
451
- return this.config.supportsImageUrls;
526
+ get supportedUrls() {
527
+ var _a, _b, _c;
528
+ return (_c = (_b = (_a = this.config).supportedUrls) == null ? void 0 : _b.call(_a)) != null ? _c : {};
452
529
  }
453
530
  async getArgs({
454
- mode,
455
531
  prompt,
456
- maxTokens = 4096,
532
+ maxOutputTokens = 4096,
457
533
  // 4096: max model output tokens TODO update default in v5
458
534
  temperature,
459
535
  topP,
@@ -463,10 +539,11 @@ var AnthropicMessagesLanguageModel = class {
463
539
  stopSequences,
464
540
  responseFormat,
465
541
  seed,
466
- providerMetadata: providerOptions
542
+ tools,
543
+ toolChoice,
544
+ providerOptions
467
545
  }) {
468
546
  var _a, _b, _c;
469
- const type = mode.type;
470
547
  const warnings = [];
471
548
  if (frequencyPenalty != null) {
472
549
  warnings.push({
@@ -486,22 +563,36 @@ var AnthropicMessagesLanguageModel = class {
486
563
  setting: "seed"
487
564
  });
488
565
  }
489
- if (responseFormat != null && responseFormat.type !== "text") {
490
- warnings.push({
491
- type: "unsupported-setting",
492
- setting: "responseFormat",
493
- details: "JSON response format is not supported."
494
- });
566
+ if ((responseFormat == null ? void 0 : responseFormat.type) === "json") {
567
+ if (responseFormat.schema == null) {
568
+ warnings.push({
569
+ type: "unsupported-setting",
570
+ setting: "responseFormat",
571
+ details: "JSON response format requires a schema. The response format is ignored."
572
+ });
573
+ } else if (tools != null) {
574
+ warnings.push({
575
+ type: "unsupported-setting",
576
+ setting: "tools",
577
+ details: "JSON response format does not support tools. The provided tools are ignored."
578
+ });
579
+ }
495
580
  }
496
- const { prompt: messagesPrompt, betas: messagesBetas } = convertToAnthropicMessagesPrompt({
497
- prompt,
498
- sendReasoning: (_a = this.settings.sendReasoning) != null ? _a : true,
499
- warnings
500
- });
501
- const anthropicOptions = parseProviderOptions({
581
+ const jsonResponseTool = (responseFormat == null ? void 0 : responseFormat.type) === "json" && responseFormat.schema != null ? {
582
+ type: "function",
583
+ name: "json",
584
+ description: "Respond with a JSON object.",
585
+ parameters: responseFormat.schema
586
+ } : void 0;
587
+ const anthropicOptions = await parseProviderOptions2({
502
588
  provider: "anthropic",
503
589
  providerOptions,
504
- schema: anthropicProviderOptionsSchema
590
+ schema: anthropicProviderOptions
591
+ });
592
+ const { prompt: messagesPrompt, betas: messagesBetas } = await convertToAnthropicMessagesPrompt({
593
+ prompt,
594
+ sendReasoning: (_a = anthropicOptions == null ? void 0 : anthropicOptions.sendReasoning) != null ? _a : true,
595
+ warnings
505
596
  });
506
597
  const isThinking = ((_b = anthropicOptions == null ? void 0 : anthropicOptions.thinking) == null ? void 0 : _b.type) === "enabled";
507
598
  const thinkingBudget = (_c = anthropicOptions == null ? void 0 : anthropicOptions.thinking) == null ? void 0 : _c.budgetTokens;
@@ -509,7 +600,7 @@ var AnthropicMessagesLanguageModel = class {
509
600
  // model id:
510
601
  model: this.modelId,
511
602
  // standardized settings:
512
- max_tokens: maxTokens,
603
+ max_tokens: maxOutputTokens,
513
604
  temperature,
514
605
  top_k: topK,
515
606
  top_p: topP,
@@ -552,44 +643,50 @@ var AnthropicMessagesLanguageModel = class {
552
643
  details: "topP is not supported when thinking is enabled"
553
644
  });
554
645
  }
555
- baseArgs.max_tokens = maxTokens + thinkingBudget;
646
+ baseArgs.max_tokens = maxOutputTokens + thinkingBudget;
556
647
  }
557
- switch (type) {
558
- case "regular": {
559
- const {
560
- tools,
561
- tool_choice,
562
- toolWarnings,
563
- betas: toolsBetas
564
- } = prepareTools(mode);
565
- return {
566
- args: { ...baseArgs, tools, tool_choice },
567
- warnings: [...warnings, ...toolWarnings],
568
- betas: /* @__PURE__ */ new Set([...messagesBetas, ...toolsBetas])
569
- };
570
- }
571
- case "object-json": {
572
- throw new UnsupportedFunctionalityError3({
573
- functionality: "json-mode object generation"
574
- });
575
- }
576
- case "object-tool": {
577
- const { name, description, parameters } = mode.tool;
578
- return {
579
- args: {
580
- ...baseArgs,
581
- tools: [{ name, description, input_schema: parameters }],
582
- tool_choice: { type: "tool", name }
583
- },
584
- warnings,
585
- betas: messagesBetas
586
- };
587
- }
588
- default: {
589
- const _exhaustiveCheck = type;
590
- throw new Error(`Unsupported type: ${_exhaustiveCheck}`);
591
- }
648
+ let modifiedTools = tools;
649
+ let modifiedToolChoice = toolChoice;
650
+ if (anthropicOptions == null ? void 0 : anthropicOptions.webSearch) {
651
+ const webSearchTool = {
652
+ type: "web_search_20250305",
653
+ name: "web_search",
654
+ max_uses: anthropicOptions.webSearch.maxUses,
655
+ allowed_domains: anthropicOptions.webSearch.allowedDomains,
656
+ blocked_domains: anthropicOptions.webSearch.blockedDomains,
657
+ ...anthropicOptions.webSearch.userLocation && {
658
+ user_location: {
659
+ type: anthropicOptions.webSearch.userLocation.type,
660
+ country: anthropicOptions.webSearch.userLocation.country,
661
+ city: anthropicOptions.webSearch.userLocation.city,
662
+ region: anthropicOptions.webSearch.userLocation.region,
663
+ timezone: anthropicOptions.webSearch.userLocation.timezone
664
+ }
665
+ }
666
+ };
667
+ modifiedTools = tools ? [...tools, webSearchTool] : [webSearchTool];
592
668
  }
669
+ const {
670
+ tools: anthropicTools2,
671
+ toolChoice: anthropicToolChoice,
672
+ toolWarnings,
673
+ betas: toolsBetas
674
+ } = prepareTools(
675
+ jsonResponseTool != null ? {
676
+ tools: [jsonResponseTool],
677
+ toolChoice: { type: "tool", toolName: jsonResponseTool.name }
678
+ } : { tools: modifiedTools, toolChoice: modifiedToolChoice }
679
+ );
680
+ return {
681
+ args: {
682
+ ...baseArgs,
683
+ tools: anthropicTools2,
684
+ tool_choice: anthropicToolChoice
685
+ },
686
+ warnings: [...warnings, ...toolWarnings],
687
+ betas: /* @__PURE__ */ new Set([...messagesBetas, ...toolsBetas]),
688
+ jsonResponseTool
689
+ };
593
690
  }
594
691
  async getHeaders({
595
692
  betas,
@@ -610,8 +707,8 @@ var AnthropicMessagesLanguageModel = class {
610
707
  return (_c = (_b = (_a = this.config).transformRequestBody) == null ? void 0 : _b.call(_a, args)) != null ? _c : args;
611
708
  }
612
709
  async doGenerate(options) {
613
- var _a, _b, _c, _d;
614
- const { args, warnings, betas } = await this.getArgs(options);
710
+ var _a, _b, _c, _d, _e;
711
+ const { args, warnings, betas, jsonResponseTool } = await this.getArgs(options);
615
712
  const {
616
713
  responseHeaders,
617
714
  value: response,
@@ -627,69 +724,118 @@ var AnthropicMessagesLanguageModel = class {
627
724
  abortSignal: options.abortSignal,
628
725
  fetch: this.config.fetch
629
726
  });
630
- const { messages: rawPrompt, ...rawSettings } = args;
631
- let text = "";
632
- for (const content of response.content) {
633
- if (content.type === "text") {
634
- text += content.text;
635
- }
636
- }
637
- let toolCalls = void 0;
638
- if (response.content.some((content) => content.type === "tool_use")) {
639
- toolCalls = [];
640
- for (const content of response.content) {
641
- if (content.type === "tool_use") {
642
- toolCalls.push({
643
- toolCallType: "function",
644
- toolCallId: content.id,
645
- toolName: content.name,
646
- args: JSON.stringify(content.input)
727
+ const content = [];
728
+ for (const part of response.content) {
729
+ switch (part.type) {
730
+ case "text": {
731
+ if (jsonResponseTool == null) {
732
+ content.push({ type: "text", text: part.text });
733
+ }
734
+ break;
735
+ }
736
+ case "thinking": {
737
+ content.push({
738
+ type: "reasoning",
739
+ text: part.thinking,
740
+ providerMetadata: {
741
+ anthropic: {
742
+ signature: part.signature
743
+ }
744
+ }
647
745
  });
746
+ break;
747
+ }
748
+ case "redacted_thinking": {
749
+ content.push({
750
+ type: "reasoning",
751
+ text: "",
752
+ providerMetadata: {
753
+ anthropic: {
754
+ redactedData: part.data
755
+ }
756
+ }
757
+ });
758
+ break;
759
+ }
760
+ case "tool_use": {
761
+ content.push(
762
+ // when a json response tool is used, the tool call becomes the text:
763
+ jsonResponseTool != null ? {
764
+ type: "text",
765
+ text: JSON.stringify(part.input)
766
+ } : {
767
+ type: "tool-call",
768
+ toolCallType: "function",
769
+ toolCallId: part.id,
770
+ toolName: part.name,
771
+ args: JSON.stringify(part.input)
772
+ }
773
+ );
774
+ break;
775
+ }
776
+ case "server_tool_use": {
777
+ continue;
778
+ }
779
+ case "web_search_tool_result": {
780
+ if (Array.isArray(part.content)) {
781
+ for (const result of part.content) {
782
+ if (result.type === "web_search_result") {
783
+ content.push({
784
+ type: "source",
785
+ sourceType: "url",
786
+ id: this.generateId(),
787
+ url: result.url,
788
+ title: result.title,
789
+ providerMetadata: {
790
+ anthropic: {
791
+ encryptedContent: result.encrypted_content,
792
+ pageAge: (_a = result.page_age) != null ? _a : null
793
+ }
794
+ }
795
+ });
796
+ }
797
+ }
798
+ } else if (part.content.type === "web_search_tool_result_error") {
799
+ throw new APICallError({
800
+ message: `Web search failed: ${part.content.error_code}`,
801
+ url: "web_search_api",
802
+ requestBodyValues: { tool_use_id: part.tool_use_id },
803
+ data: { error_code: part.content.error_code }
804
+ });
805
+ }
806
+ break;
648
807
  }
649
808
  }
650
809
  }
651
- const reasoning = response.content.filter(
652
- (content) => content.type === "redacted_thinking" || content.type === "thinking"
653
- ).map(
654
- (content) => content.type === "thinking" ? {
655
- type: "text",
656
- text: content.thinking,
657
- signature: content.signature
658
- } : {
659
- type: "redacted",
660
- data: content.data
661
- }
662
- );
663
810
  return {
664
- text,
665
- reasoning: reasoning.length > 0 ? reasoning : void 0,
666
- toolCalls,
667
- finishReason: mapAnthropicStopReason(response.stop_reason),
811
+ content,
812
+ finishReason: mapAnthropicStopReason({
813
+ finishReason: response.stop_reason,
814
+ isJsonResponseFromTool: jsonResponseTool != null
815
+ }),
668
816
  usage: {
669
- promptTokens: response.usage.input_tokens,
670
- completionTokens: response.usage.output_tokens
817
+ inputTokens: response.usage.input_tokens,
818
+ outputTokens: response.usage.output_tokens,
819
+ totalTokens: response.usage.input_tokens + response.usage.output_tokens,
820
+ cachedInputTokens: (_b = response.usage.cache_read_input_tokens) != null ? _b : void 0
671
821
  },
672
- rawCall: { rawPrompt, rawSettings },
673
- rawResponse: {
822
+ request: { body: args },
823
+ response: {
824
+ id: (_c = response.id) != null ? _c : void 0,
825
+ modelId: (_d = response.model) != null ? _d : void 0,
674
826
  headers: responseHeaders,
675
827
  body: rawResponse
676
828
  },
677
- response: {
678
- id: (_a = response.id) != null ? _a : void 0,
679
- modelId: (_b = response.model) != null ? _b : void 0
680
- },
681
829
  warnings,
682
830
  providerMetadata: {
683
831
  anthropic: {
684
- cacheCreationInputTokens: (_c = response.usage.cache_creation_input_tokens) != null ? _c : null,
685
- cacheReadInputTokens: (_d = response.usage.cache_read_input_tokens) != null ? _d : null
832
+ cacheCreationInputTokens: (_e = response.usage.cache_creation_input_tokens) != null ? _e : null
686
833
  }
687
- },
688
- request: { body: JSON.stringify(args) }
834
+ }
689
835
  };
690
836
  }
691
837
  async doStream(options) {
692
- const { args, warnings, betas } = await this.getArgs(options);
838
+ const { args, warnings, betas, jsonResponseTool } = await this.getArgs(options);
693
839
  const body = { ...args, stream: true };
694
840
  const { responseHeaders, value: response } = await postJsonToApi({
695
841
  url: this.buildRequestUrl(true),
@@ -702,20 +848,25 @@ var AnthropicMessagesLanguageModel = class {
702
848
  abortSignal: options.abortSignal,
703
849
  fetch: this.config.fetch
704
850
  });
705
- const { messages: rawPrompt, ...rawSettings } = args;
706
851
  let finishReason = "unknown";
707
852
  const usage = {
708
- promptTokens: Number.NaN,
709
- completionTokens: Number.NaN
853
+ inputTokens: void 0,
854
+ outputTokens: void 0,
855
+ totalTokens: void 0
710
856
  };
711
857
  const toolCallContentBlocks = {};
712
858
  let providerMetadata = void 0;
713
859
  let blockType = void 0;
860
+ const config = this.config;
861
+ const generateId2 = this.generateId;
714
862
  return {
715
863
  stream: response.pipeThrough(
716
864
  new TransformStream({
865
+ start(controller) {
866
+ controller.enqueue({ type: "stream-start", warnings });
867
+ },
717
868
  transform(chunk, controller) {
718
- var _a, _b, _c, _d;
869
+ var _a, _b, _c, _d, _e, _f, _g;
719
870
  if (!chunk.success) {
720
871
  controller.enqueue({ type: "error", error: chunk.error });
721
872
  return;
@@ -735,9 +886,15 @@ var AnthropicMessagesLanguageModel = class {
735
886
  }
736
887
  case "redacted_thinking": {
737
888
  controller.enqueue({
738
- type: "redacted-reasoning",
739
- data: value.content_block.data
889
+ type: "reasoning",
890
+ text: "",
891
+ providerMetadata: {
892
+ anthropic: {
893
+ redactedData: value.content_block.data
894
+ }
895
+ }
740
896
  });
897
+ controller.enqueue({ type: "reasoning-part-finish" });
741
898
  return;
742
899
  }
743
900
  case "tool_use": {
@@ -748,6 +905,40 @@ var AnthropicMessagesLanguageModel = class {
748
905
  };
749
906
  return;
750
907
  }
908
+ case "server_tool_use": {
909
+ return;
910
+ }
911
+ case "web_search_tool_result": {
912
+ if (Array.isArray(value.content_block.content)) {
913
+ for (const result of value.content_block.content) {
914
+ if (result.type === "web_search_result") {
915
+ controller.enqueue({
916
+ type: "source",
917
+ sourceType: "url",
918
+ id: generateId2(),
919
+ url: result.url,
920
+ title: result.title,
921
+ providerMetadata: {
922
+ anthropic: {
923
+ encryptedContent: result.encrypted_content,
924
+ pageAge: (_a = result.page_age) != null ? _a : null
925
+ }
926
+ }
927
+ });
928
+ }
929
+ }
930
+ } else if (value.content_block.content.type === "web_search_tool_result_error") {
931
+ controller.enqueue({
932
+ type: "error",
933
+ error: {
934
+ type: "web-search-error",
935
+ message: `Web search failed: ${value.content_block.content.error_code}`,
936
+ code: value.content_block.content.error_code
937
+ }
938
+ });
939
+ }
940
+ return;
941
+ }
751
942
  default: {
752
943
  const _exhaustiveCheck = contentBlockType;
753
944
  throw new Error(
@@ -759,13 +950,15 @@ var AnthropicMessagesLanguageModel = class {
759
950
  case "content_block_stop": {
760
951
  if (toolCallContentBlocks[value.index] != null) {
761
952
  const contentBlock = toolCallContentBlocks[value.index];
762
- controller.enqueue({
763
- type: "tool-call",
764
- toolCallType: "function",
765
- toolCallId: contentBlock.toolCallId,
766
- toolName: contentBlock.toolName,
767
- args: contentBlock.jsonText
768
- });
953
+ if (jsonResponseTool == null) {
954
+ controller.enqueue({
955
+ type: "tool-call",
956
+ toolCallType: "function",
957
+ toolCallId: contentBlock.toolCallId,
958
+ toolName: contentBlock.toolName,
959
+ args: contentBlock.jsonText
960
+ });
961
+ }
769
962
  delete toolCallContentBlocks[value.index];
770
963
  }
771
964
  blockType = void 0;
@@ -775,40 +968,60 @@ var AnthropicMessagesLanguageModel = class {
775
968
  const deltaType = value.delta.type;
776
969
  switch (deltaType) {
777
970
  case "text_delta": {
971
+ if (jsonResponseTool != null) {
972
+ return;
973
+ }
778
974
  controller.enqueue({
779
- type: "text-delta",
780
- textDelta: value.delta.text
975
+ type: "text",
976
+ text: value.delta.text
781
977
  });
782
978
  return;
783
979
  }
784
980
  case "thinking_delta": {
785
981
  controller.enqueue({
786
982
  type: "reasoning",
787
- textDelta: value.delta.thinking
983
+ text: value.delta.thinking
788
984
  });
789
985
  return;
790
986
  }
791
987
  case "signature_delta": {
792
988
  if (blockType === "thinking") {
793
989
  controller.enqueue({
794
- type: "reasoning-signature",
795
- signature: value.delta.signature
990
+ type: "reasoning",
991
+ text: "",
992
+ providerMetadata: {
993
+ anthropic: {
994
+ signature: value.delta.signature
995
+ }
996
+ }
796
997
  });
998
+ controller.enqueue({ type: "reasoning-part-finish" });
797
999
  }
798
1000
  return;
799
1001
  }
800
1002
  case "input_json_delta": {
801
1003
  const contentBlock = toolCallContentBlocks[value.index];
802
- controller.enqueue({
803
- type: "tool-call-delta",
804
- toolCallType: "function",
805
- toolCallId: contentBlock.toolCallId,
806
- toolName: contentBlock.toolName,
807
- argsTextDelta: value.delta.partial_json
808
- });
1004
+ if (!contentBlock) {
1005
+ return;
1006
+ }
1007
+ controller.enqueue(
1008
+ jsonResponseTool != null ? {
1009
+ type: "text",
1010
+ text: value.delta.partial_json
1011
+ } : {
1012
+ type: "tool-call-delta",
1013
+ toolCallType: "function",
1014
+ toolCallId: contentBlock.toolCallId,
1015
+ toolName: contentBlock.toolName,
1016
+ argsTextDelta: value.delta.partial_json
1017
+ }
1018
+ );
809
1019
  contentBlock.jsonText += value.delta.partial_json;
810
1020
  return;
811
1021
  }
1022
+ case "citations_delta": {
1023
+ return;
1024
+ }
812
1025
  default: {
813
1026
  const _exhaustiveCheck = deltaType;
814
1027
  throw new Error(
@@ -818,24 +1031,27 @@ var AnthropicMessagesLanguageModel = class {
818
1031
  }
819
1032
  }
820
1033
  case "message_start": {
821
- usage.promptTokens = value.message.usage.input_tokens;
822
- usage.completionTokens = value.message.usage.output_tokens;
1034
+ usage.inputTokens = value.message.usage.input_tokens;
1035
+ usage.cachedInputTokens = (_b = value.message.usage.cache_read_input_tokens) != null ? _b : void 0;
823
1036
  providerMetadata = {
824
1037
  anthropic: {
825
- cacheCreationInputTokens: (_a = value.message.usage.cache_creation_input_tokens) != null ? _a : null,
826
- cacheReadInputTokens: (_b = value.message.usage.cache_read_input_tokens) != null ? _b : null
1038
+ cacheCreationInputTokens: (_c = value.message.usage.cache_creation_input_tokens) != null ? _c : null
827
1039
  }
828
1040
  };
829
1041
  controller.enqueue({
830
1042
  type: "response-metadata",
831
- id: (_c = value.message.id) != null ? _c : void 0,
832
- modelId: (_d = value.message.model) != null ? _d : void 0
1043
+ id: (_d = value.message.id) != null ? _d : void 0,
1044
+ modelId: (_e = value.message.model) != null ? _e : void 0
833
1045
  });
834
1046
  return;
835
1047
  }
836
1048
  case "message_delta": {
837
- usage.completionTokens = value.usage.output_tokens;
838
- finishReason = mapAnthropicStopReason(value.delta.stop_reason);
1049
+ usage.outputTokens = value.usage.output_tokens;
1050
+ usage.totalTokens = ((_f = usage.inputTokens) != null ? _f : 0) + ((_g = value.usage.output_tokens) != null ? _g : 0);
1051
+ finishReason = mapAnthropicStopReason({
1052
+ finishReason: value.delta.stop_reason,
1053
+ isJsonResponseFromTool: jsonResponseTool != null
1054
+ });
839
1055
  return;
840
1056
  }
841
1057
  case "message_stop": {
@@ -859,142 +1075,201 @@ var AnthropicMessagesLanguageModel = class {
859
1075
  }
860
1076
  })
861
1077
  ),
862
- rawCall: { rawPrompt, rawSettings },
863
- rawResponse: { headers: responseHeaders },
864
- warnings,
865
- request: { body: JSON.stringify(body) }
1078
+ request: { body },
1079
+ response: { headers: responseHeaders }
866
1080
  };
867
1081
  }
868
1082
  };
869
- var anthropicMessagesResponseSchema = z2.object({
870
- type: z2.literal("message"),
871
- id: z2.string().nullish(),
872
- model: z2.string().nullish(),
873
- content: z2.array(
874
- z2.discriminatedUnion("type", [
875
- z2.object({
876
- type: z2.literal("text"),
877
- text: z2.string()
1083
+ var anthropicMessagesResponseSchema = z3.object({
1084
+ type: z3.literal("message"),
1085
+ id: z3.string().nullish(),
1086
+ model: z3.string().nullish(),
1087
+ content: z3.array(
1088
+ z3.discriminatedUnion("type", [
1089
+ z3.object({
1090
+ type: z3.literal("text"),
1091
+ text: z3.string()
1092
+ }),
1093
+ z3.object({
1094
+ type: z3.literal("thinking"),
1095
+ thinking: z3.string(),
1096
+ signature: z3.string()
1097
+ }),
1098
+ z3.object({
1099
+ type: z3.literal("redacted_thinking"),
1100
+ data: z3.string()
878
1101
  }),
879
- z2.object({
880
- type: z2.literal("thinking"),
881
- thinking: z2.string(),
882
- signature: z2.string()
1102
+ z3.object({
1103
+ type: z3.literal("tool_use"),
1104
+ id: z3.string(),
1105
+ name: z3.string(),
1106
+ input: z3.unknown()
883
1107
  }),
884
- z2.object({
885
- type: z2.literal("redacted_thinking"),
886
- data: z2.string()
1108
+ z3.object({
1109
+ type: z3.literal("server_tool_use"),
1110
+ id: z3.string(),
1111
+ name: z3.string(),
1112
+ input: z3.record(z3.unknown()).nullish()
887
1113
  }),
888
- z2.object({
889
- type: z2.literal("tool_use"),
890
- id: z2.string(),
891
- name: z2.string(),
892
- input: z2.unknown()
1114
+ z3.object({
1115
+ type: z3.literal("web_search_tool_result"),
1116
+ tool_use_id: z3.string(),
1117
+ content: z3.union([
1118
+ z3.array(
1119
+ z3.object({
1120
+ type: z3.literal("web_search_result"),
1121
+ url: z3.string(),
1122
+ title: z3.string(),
1123
+ encrypted_content: z3.string(),
1124
+ page_age: z3.string().nullish()
1125
+ })
1126
+ ),
1127
+ z3.object({
1128
+ type: z3.literal("web_search_tool_result_error"),
1129
+ error_code: z3.string()
1130
+ })
1131
+ ])
893
1132
  })
894
1133
  ])
895
1134
  ),
896
- stop_reason: z2.string().nullish(),
897
- usage: z2.object({
898
- input_tokens: z2.number(),
899
- output_tokens: z2.number(),
900
- cache_creation_input_tokens: z2.number().nullish(),
901
- cache_read_input_tokens: z2.number().nullish()
1135
+ stop_reason: z3.string().nullish(),
1136
+ usage: z3.object({
1137
+ input_tokens: z3.number(),
1138
+ output_tokens: z3.number(),
1139
+ cache_creation_input_tokens: z3.number().nullish(),
1140
+ cache_read_input_tokens: z3.number().nullish(),
1141
+ server_tool_use: z3.object({
1142
+ web_search_requests: z3.number()
1143
+ }).nullish()
902
1144
  })
903
1145
  });
904
- var anthropicMessagesChunkSchema = z2.discriminatedUnion("type", [
905
- z2.object({
906
- type: z2.literal("message_start"),
907
- message: z2.object({
908
- id: z2.string().nullish(),
909
- model: z2.string().nullish(),
910
- usage: z2.object({
911
- input_tokens: z2.number(),
912
- output_tokens: z2.number(),
913
- cache_creation_input_tokens: z2.number().nullish(),
914
- cache_read_input_tokens: z2.number().nullish()
1146
+ var anthropicMessagesChunkSchema = z3.discriminatedUnion("type", [
1147
+ z3.object({
1148
+ type: z3.literal("message_start"),
1149
+ message: z3.object({
1150
+ id: z3.string().nullish(),
1151
+ model: z3.string().nullish(),
1152
+ usage: z3.object({
1153
+ input_tokens: z3.number(),
1154
+ output_tokens: z3.number(),
1155
+ cache_creation_input_tokens: z3.number().nullish(),
1156
+ cache_read_input_tokens: z3.number().nullish()
915
1157
  })
916
1158
  })
917
1159
  }),
918
- z2.object({
919
- type: z2.literal("content_block_start"),
920
- index: z2.number(),
921
- content_block: z2.discriminatedUnion("type", [
922
- z2.object({
923
- type: z2.literal("text"),
924
- text: z2.string()
1160
+ z3.object({
1161
+ type: z3.literal("content_block_start"),
1162
+ index: z3.number(),
1163
+ content_block: z3.discriminatedUnion("type", [
1164
+ z3.object({
1165
+ type: z3.literal("text"),
1166
+ text: z3.string()
925
1167
  }),
926
- z2.object({
927
- type: z2.literal("thinking"),
928
- thinking: z2.string()
1168
+ z3.object({
1169
+ type: z3.literal("thinking"),
1170
+ thinking: z3.string()
929
1171
  }),
930
- z2.object({
931
- type: z2.literal("tool_use"),
932
- id: z2.string(),
933
- name: z2.string()
1172
+ z3.object({
1173
+ type: z3.literal("tool_use"),
1174
+ id: z3.string(),
1175
+ name: z3.string()
934
1176
  }),
935
- z2.object({
936
- type: z2.literal("redacted_thinking"),
937
- data: z2.string()
1177
+ z3.object({
1178
+ type: z3.literal("redacted_thinking"),
1179
+ data: z3.string()
1180
+ }),
1181
+ z3.object({
1182
+ type: z3.literal("server_tool_use"),
1183
+ id: z3.string(),
1184
+ name: z3.string(),
1185
+ input: z3.record(z3.unknown()).nullish()
1186
+ }),
1187
+ z3.object({
1188
+ type: z3.literal("web_search_tool_result"),
1189
+ tool_use_id: z3.string(),
1190
+ content: z3.union([
1191
+ z3.array(
1192
+ z3.object({
1193
+ type: z3.literal("web_search_result"),
1194
+ url: z3.string(),
1195
+ title: z3.string(),
1196
+ encrypted_content: z3.string(),
1197
+ page_age: z3.string().nullish()
1198
+ })
1199
+ ),
1200
+ z3.object({
1201
+ type: z3.literal("web_search_tool_result_error"),
1202
+ error_code: z3.string()
1203
+ })
1204
+ ])
938
1205
  })
939
1206
  ])
940
1207
  }),
941
- z2.object({
942
- type: z2.literal("content_block_delta"),
943
- index: z2.number(),
944
- delta: z2.discriminatedUnion("type", [
945
- z2.object({
946
- type: z2.literal("input_json_delta"),
947
- partial_json: z2.string()
1208
+ z3.object({
1209
+ type: z3.literal("content_block_delta"),
1210
+ index: z3.number(),
1211
+ delta: z3.discriminatedUnion("type", [
1212
+ z3.object({
1213
+ type: z3.literal("input_json_delta"),
1214
+ partial_json: z3.string()
1215
+ }),
1216
+ z3.object({
1217
+ type: z3.literal("text_delta"),
1218
+ text: z3.string()
948
1219
  }),
949
- z2.object({
950
- type: z2.literal("text_delta"),
951
- text: z2.string()
1220
+ z3.object({
1221
+ type: z3.literal("thinking_delta"),
1222
+ thinking: z3.string()
952
1223
  }),
953
- z2.object({
954
- type: z2.literal("thinking_delta"),
955
- thinking: z2.string()
1224
+ z3.object({
1225
+ type: z3.literal("signature_delta"),
1226
+ signature: z3.string()
956
1227
  }),
957
- z2.object({
958
- type: z2.literal("signature_delta"),
959
- signature: z2.string()
1228
+ z3.object({
1229
+ type: z3.literal("citations_delta"),
1230
+ citation: z3.object({
1231
+ type: z3.literal("web_search_result_location"),
1232
+ cited_text: z3.string(),
1233
+ url: z3.string(),
1234
+ title: z3.string(),
1235
+ encrypted_index: z3.string()
1236
+ })
960
1237
  })
961
1238
  ])
962
1239
  }),
963
- z2.object({
964
- type: z2.literal("content_block_stop"),
965
- index: z2.number()
1240
+ z3.object({
1241
+ type: z3.literal("content_block_stop"),
1242
+ index: z3.number()
966
1243
  }),
967
- z2.object({
968
- type: z2.literal("error"),
969
- error: z2.object({
970
- type: z2.string(),
971
- message: z2.string()
1244
+ z3.object({
1245
+ type: z3.literal("error"),
1246
+ error: z3.object({
1247
+ type: z3.string(),
1248
+ message: z3.string()
972
1249
  })
973
1250
  }),
974
- z2.object({
975
- type: z2.literal("message_delta"),
976
- delta: z2.object({ stop_reason: z2.string().nullish() }),
977
- usage: z2.object({ output_tokens: z2.number() })
1251
+ z3.object({
1252
+ type: z3.literal("message_delta"),
1253
+ delta: z3.object({ stop_reason: z3.string().nullish() }),
1254
+ usage: z3.object({ output_tokens: z3.number() })
978
1255
  }),
979
- z2.object({
980
- type: z2.literal("message_stop")
1256
+ z3.object({
1257
+ type: z3.literal("message_stop")
981
1258
  }),
982
- z2.object({
983
- type: z2.literal("ping")
1259
+ z3.object({
1260
+ type: z3.literal("ping")
984
1261
  })
985
1262
  ]);
986
- var anthropicProviderOptionsSchema = z2.object({
987
- thinking: z2.object({
988
- type: z2.union([z2.literal("enabled"), z2.literal("disabled")]),
989
- budgetTokens: z2.number().optional()
990
- }).optional()
1263
+ var anthropicReasoningMetadataSchema = z3.object({
1264
+ signature: z3.string().optional(),
1265
+ redactedData: z3.string().optional()
991
1266
  });
992
1267
 
993
1268
  // src/anthropic-tools.ts
994
- import { z as z3 } from "zod";
995
- var Bash20241022Parameters = z3.object({
996
- command: z3.string(),
997
- restart: z3.boolean().optional()
1269
+ import { z as z4 } from "zod";
1270
+ var Bash20241022Parameters = z4.object({
1271
+ command: z4.string(),
1272
+ restart: z4.boolean().optional()
998
1273
  });
999
1274
  function bashTool_20241022(options = {}) {
1000
1275
  return {
@@ -1006,9 +1281,9 @@ function bashTool_20241022(options = {}) {
1006
1281
  experimental_toToolResultContent: options.experimental_toToolResultContent
1007
1282
  };
1008
1283
  }
1009
- var Bash20250124Parameters = z3.object({
1010
- command: z3.string(),
1011
- restart: z3.boolean().optional()
1284
+ var Bash20250124Parameters = z4.object({
1285
+ command: z4.string(),
1286
+ restart: z4.boolean().optional()
1012
1287
  });
1013
1288
  function bashTool_20250124(options = {}) {
1014
1289
  return {
@@ -1020,14 +1295,14 @@ function bashTool_20250124(options = {}) {
1020
1295
  experimental_toToolResultContent: options.experimental_toToolResultContent
1021
1296
  };
1022
1297
  }
1023
- var TextEditor20241022Parameters = z3.object({
1024
- command: z3.enum(["view", "create", "str_replace", "insert", "undo_edit"]),
1025
- path: z3.string(),
1026
- file_text: z3.string().optional(),
1027
- insert_line: z3.number().int().optional(),
1028
- new_str: z3.string().optional(),
1029
- old_str: z3.string().optional(),
1030
- view_range: z3.array(z3.number().int()).optional()
1298
+ var TextEditor20241022Parameters = z4.object({
1299
+ command: z4.enum(["view", "create", "str_replace", "insert", "undo_edit"]),
1300
+ path: z4.string(),
1301
+ file_text: z4.string().optional(),
1302
+ insert_line: z4.number().int().optional(),
1303
+ new_str: z4.string().optional(),
1304
+ old_str: z4.string().optional(),
1305
+ view_range: z4.array(z4.number().int()).optional()
1031
1306
  });
1032
1307
  function textEditorTool_20241022(options = {}) {
1033
1308
  return {
@@ -1039,14 +1314,14 @@ function textEditorTool_20241022(options = {}) {
1039
1314
  experimental_toToolResultContent: options.experimental_toToolResultContent
1040
1315
  };
1041
1316
  }
1042
- var TextEditor20250124Parameters = z3.object({
1043
- command: z3.enum(["view", "create", "str_replace", "insert", "undo_edit"]),
1044
- path: z3.string(),
1045
- file_text: z3.string().optional(),
1046
- insert_line: z3.number().int().optional(),
1047
- new_str: z3.string().optional(),
1048
- old_str: z3.string().optional(),
1049
- view_range: z3.array(z3.number().int()).optional()
1317
+ var TextEditor20250124Parameters = z4.object({
1318
+ command: z4.enum(["view", "create", "str_replace", "insert", "undo_edit"]),
1319
+ path: z4.string(),
1320
+ file_text: z4.string().optional(),
1321
+ insert_line: z4.number().int().optional(),
1322
+ new_str: z4.string().optional(),
1323
+ old_str: z4.string().optional(),
1324
+ view_range: z4.array(z4.number().int()).optional()
1050
1325
  });
1051
1326
  function textEditorTool_20250124(options = {}) {
1052
1327
  return {
@@ -1058,8 +1333,8 @@ function textEditorTool_20250124(options = {}) {
1058
1333
  experimental_toToolResultContent: options.experimental_toToolResultContent
1059
1334
  };
1060
1335
  }
1061
- var Computer20241022Parameters = z3.object({
1062
- action: z3.enum([
1336
+ var Computer20241022Parameters = z4.object({
1337
+ action: z4.enum([
1063
1338
  "key",
1064
1339
  "type",
1065
1340
  "mouse_move",
@@ -1071,8 +1346,8 @@ var Computer20241022Parameters = z3.object({
1071
1346
  "screenshot",
1072
1347
  "cursor_position"
1073
1348
  ]),
1074
- coordinate: z3.array(z3.number().int()).optional(),
1075
- text: z3.string().optional()
1349
+ coordinate: z4.array(z4.number().int()).optional(),
1350
+ text: z4.string().optional()
1076
1351
  });
1077
1352
  function computerTool_20241022(options) {
1078
1353
  return {
@@ -1088,8 +1363,8 @@ function computerTool_20241022(options) {
1088
1363
  experimental_toToolResultContent: options.experimental_toToolResultContent
1089
1364
  };
1090
1365
  }
1091
- var Computer20250124Parameters = z3.object({
1092
- action: z3.enum([
1366
+ var Computer20250124Parameters = z4.object({
1367
+ action: z4.enum([
1093
1368
  "key",
1094
1369
  "hold_key",
1095
1370
  "type",
@@ -1107,12 +1382,12 @@ var Computer20250124Parameters = z3.object({
1107
1382
  "wait",
1108
1383
  "screenshot"
1109
1384
  ]),
1110
- coordinate: z3.tuple([z3.number().int(), z3.number().int()]).optional(),
1111
- duration: z3.number().optional(),
1112
- scroll_amount: z3.number().optional(),
1113
- scroll_direction: z3.enum(["up", "down", "left", "right"]).optional(),
1114
- start_coordinate: z3.tuple([z3.number().int(), z3.number().int()]).optional(),
1115
- text: z3.string().optional()
1385
+ coordinate: z4.tuple([z4.number().int(), z4.number().int()]).optional(),
1386
+ duration: z4.number().optional(),
1387
+ scroll_amount: z4.number().optional(),
1388
+ scroll_direction: z4.enum(["up", "down", "left", "right"]).optional(),
1389
+ start_coordinate: z4.tuple([z4.number().int(), z4.number().int()]).optional(),
1390
+ text: z4.string().optional()
1116
1391
  });
1117
1392
  function computerTool_20250124(options) {
1118
1393
  return {