@ai-sdk/openai 2.0.24 → 2.0.26

This diff covers publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between those versions as they appear in the public registry.
@@ -1952,7 +1952,13 @@ var OpenAITranscriptionModel = class {
  };
  for (const [key, value] of Object.entries(transcriptionModelOptions)) {
  if (value != null) {
- formData.append(key, String(value));
+ if (Array.isArray(value)) {
+ for (const item of value) {
+ formData.append(`${key}[]`, String(item));
+ }
+ } else {
+ formData.append(key, String(value));
+ }
  }
  }
  }
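The hunk above changes how `OpenAITranscriptionModel` serializes provider options into multipart form data: array values are now expanded into repeated `key[]` entries instead of being coerced to a single comma-joined string by `String(value)`. A minimal sketch of the new behavior (the option object below is illustrative, not taken from the package):

```ts
// Sketch only: demonstrates the array-aware serialization added in 2.0.26.
const formData = new FormData();
const transcriptionModelOptions: Record<string, unknown> = {
  language: "en",
  // hypothetical array-valued option used for illustration
  timestamp_granularities: ["word", "segment"],
};

for (const [key, value] of Object.entries(transcriptionModelOptions)) {
  if (value != null) {
    if (Array.isArray(value)) {
      // 2.0.26: each item becomes its own `key[]` form field
      for (const item of value) {
        formData.append(`${key}[]`, String(item));
      }
    } else {
      formData.append(key, String(value));
    }
  }
}
// 2.0.24 would have sent a single field instead,
// e.g. timestamp_granularities = "word,segment".
```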
@@ -2143,8 +2149,8 @@ var OpenAISpeechModel = class {

  // src/responses/openai-responses-language-model.ts
  var import_provider8 = require("@ai-sdk/provider");
- var import_provider_utils14 = require("@ai-sdk/provider-utils");
- var import_v416 = require("zod/v4");
+ var import_provider_utils15 = require("@ai-sdk/provider-utils");
+ var import_v417 = require("zod/v4");

  // src/responses/convert-to-openai-responses-messages.ts
  var import_provider6 = require("@ai-sdk/provider");
@@ -2347,18 +2353,18 @@ var openaiResponsesReasoningProviderOptionsSchema = import_v414.z.object({
  // src/responses/map-openai-responses-finish-reason.ts
  function mapOpenAIResponseFinishReason({
  finishReason,
- hasToolCalls
+ hasFunctionCall
  }) {
  switch (finishReason) {
  case void 0:
  case null:
- return hasToolCalls ? "tool-calls" : "stop";
+ return hasFunctionCall ? "tool-calls" : "stop";
  case "max_output_tokens":
  return "length";
  case "content_filter":
  return "content-filter";
  default:
- return hasToolCalls ? "tool-calls" : "unknown";
+ return hasFunctionCall ? "tool-calls" : "unknown";
  }
  }

@@ -2382,6 +2388,44 @@ var codeInterpreter = (0, import_provider_utils13.createProviderDefinedToolFacto
  inputSchema: import_v415.z.object({})
  });

+ // src/tool/web-search.ts
+ var import_provider_utils14 = require("@ai-sdk/provider-utils");
+ var import_v416 = require("zod/v4");
+ var webSearchArgsSchema = import_v416.z.object({
+ filters: import_v416.z.object({
+ allowedDomains: import_v416.z.array(import_v416.z.string()).optional()
+ }).optional(),
+ searchContextSize: import_v416.z.enum(["low", "medium", "high"]).optional(),
+ userLocation: import_v416.z.object({
+ type: import_v416.z.literal("approximate"),
+ country: import_v416.z.string().optional(),
+ city: import_v416.z.string().optional(),
+ region: import_v416.z.string().optional(),
+ timezone: import_v416.z.string().optional()
+ }).optional()
+ });
+ var factory = (0, import_provider_utils14.createProviderDefinedToolFactory)({
+ id: "openai.web_search",
+ name: "web_search",
+ inputSchema: import_v416.z.object({
+ action: import_v416.z.discriminatedUnion("type", [
+ import_v416.z.object({
+ type: import_v416.z.literal("search"),
+ query: import_v416.z.string().nullish()
+ }),
+ import_v416.z.object({
+ type: import_v416.z.literal("open_page"),
+ url: import_v416.z.string()
+ }),
+ import_v416.z.object({
+ type: import_v416.z.literal("find"),
+ url: import_v416.z.string(),
+ pattern: import_v416.z.string()
+ })
+ ]).nullish()
+ })
+ });
+
  // src/responses/openai-responses-prepare-tools.ts
  function prepareResponsesTools({
  tools,
@@ -2427,6 +2471,16 @@ function prepareResponsesTools({
  });
  break;
  }
+ case "openai.web_search": {
+ const args = webSearchArgsSchema.parse(tool.args);
+ openaiTools.push({
+ type: "web_search",
+ filters: args.filters != null ? { allowed_domains: args.filters.allowedDomains } : void 0,
+ search_context_size: args.searchContextSize,
+ user_location: args.userLocation
+ });
+ break;
+ }
  case "openai.code_interpreter": {
  const args = codeInterpreterArgsSchema.parse(tool.args);
  openaiTools.push({
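This hunk wires the new `openai.web_search` provider-defined tool into the Responses API request body, translating the camelCase args validated by `webSearchArgsSchema` into the snake_case shape the API expects. A rough sketch of that mapping (the argument values are assumptions for illustration):

```ts
// Hypothetical args in the shape accepted by webSearchArgsSchema:
const args = {
  filters: { allowedDomains: ["example.com"] },
  searchContextSize: "medium" as const,
  userLocation: { type: "approximate" as const, country: "US" },
};

// What the new "openai.web_search" case pushes onto openaiTools:
const webSearchTool = {
  type: "web_search",
  // camelCase -> snake_case conversion for the nested filter field
  filters:
    args.filters != null
      ? { allowed_domains: args.filters.allowedDomains }
      : undefined,
  search_context_size: args.searchContextSize,
  user_location: args.userLocation,
};
```

The next hunk extends the tool-choice mapping so that `web_search`, like `web_search_preview`, `file_search`, and `code_interpreter`, is passed through as a typed built-in tool rather than a function tool.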
@@ -2459,7 +2513,7 @@ function prepareResponsesTools({
  case "tool":
  return {
  tools: openaiTools,
- toolChoice: toolChoice.toolName === "code_interpreter" || toolChoice.toolName === "file_search" || toolChoice.toolName === "web_search_preview" ? { type: toolChoice.toolName } : { type: "function", name: toolChoice.toolName },
+ toolChoice: toolChoice.toolName === "code_interpreter" || toolChoice.toolName === "file_search" || toolChoice.toolName === "web_search_preview" || toolChoice.toolName === "web_search" ? { type: toolChoice.toolName } : { type: "function", name: toolChoice.toolName },
  toolWarnings
  };
  default: {
@@ -2472,35 +2526,35 @@ function prepareResponsesTools({
  }

  // src/responses/openai-responses-language-model.ts
- var webSearchCallItem = import_v416.z.object({
- type: import_v416.z.literal("web_search_call"),
- id: import_v416.z.string(),
- status: import_v416.z.string(),
- action: import_v416.z.discriminatedUnion("type", [
- import_v416.z.object({
- type: import_v416.z.literal("search"),
- query: import_v416.z.string().nullish()
+ var webSearchCallItem = import_v417.z.object({
+ type: import_v417.z.literal("web_search_call"),
+ id: import_v417.z.string(),
+ status: import_v417.z.string(),
+ action: import_v417.z.discriminatedUnion("type", [
+ import_v417.z.object({
+ type: import_v417.z.literal("search"),
+ query: import_v417.z.string().nullish()
  }),
- import_v416.z.object({
- type: import_v416.z.literal("open_page"),
- url: import_v416.z.string()
+ import_v417.z.object({
+ type: import_v417.z.literal("open_page"),
+ url: import_v417.z.string()
  }),
- import_v416.z.object({
- type: import_v416.z.literal("find"),
- url: import_v416.z.string(),
- pattern: import_v416.z.string()
+ import_v417.z.object({
+ type: import_v417.z.literal("find"),
+ url: import_v417.z.string(),
+ pattern: import_v417.z.string()
  })
  ]).nullish()
  });
  var TOP_LOGPROBS_MAX = 20;
- var LOGPROBS_SCHEMA = import_v416.z.array(
- import_v416.z.object({
- token: import_v416.z.string(),
- logprob: import_v416.z.number(),
- top_logprobs: import_v416.z.array(
- import_v416.z.object({
- token: import_v416.z.string(),
- logprob: import_v416.z.number()
+ var LOGPROBS_SCHEMA = import_v417.z.array(
+ import_v417.z.object({
+ token: import_v417.z.string(),
+ logprob: import_v417.z.number(),
+ top_logprobs: import_v417.z.array(
+ import_v417.z.object({
+ token: import_v417.z.string(),
+ logprob: import_v417.z.number()
  })
  )
  })
@@ -2563,7 +2617,7 @@ var OpenAIResponsesLanguageModel = class {
  fileIdPrefixes: this.config.fileIdPrefixes
  });
  warnings.push(...messageWarnings);
- const openaiOptions = await (0, import_provider_utils14.parseProviderOptions)({
+ const openaiOptions = await (0, import_provider_utils15.parseProviderOptions)({
  provider: "openai",
  providerOptions,
  schema: openaiResponsesProviderOptionsSchema
@@ -2698,98 +2752,98 @@ var OpenAIResponsesLanguageModel = class {
  responseHeaders,
  value: response,
  rawValue: rawResponse
- } = await (0, import_provider_utils14.postJsonToApi)({
+ } = await (0, import_provider_utils15.postJsonToApi)({
  url,
- headers: (0, import_provider_utils14.combineHeaders)(this.config.headers(), options.headers),
+ headers: (0, import_provider_utils15.combineHeaders)(this.config.headers(), options.headers),
  body,
  failedResponseHandler: openaiFailedResponseHandler,
- successfulResponseHandler: (0, import_provider_utils14.createJsonResponseHandler)(
- import_v416.z.object({
- id: import_v416.z.string(),
- created_at: import_v416.z.number(),
- error: import_v416.z.object({
- code: import_v416.z.string(),
- message: import_v416.z.string()
+ successfulResponseHandler: (0, import_provider_utils15.createJsonResponseHandler)(
+ import_v417.z.object({
+ id: import_v417.z.string(),
+ created_at: import_v417.z.number(),
+ error: import_v417.z.object({
+ code: import_v417.z.string(),
+ message: import_v417.z.string()
  }).nullish(),
- model: import_v416.z.string(),
- output: import_v416.z.array(
- import_v416.z.discriminatedUnion("type", [
- import_v416.z.object({
- type: import_v416.z.literal("message"),
- role: import_v416.z.literal("assistant"),
- id: import_v416.z.string(),
- content: import_v416.z.array(
- import_v416.z.object({
- type: import_v416.z.literal("output_text"),
- text: import_v416.z.string(),
+ model: import_v417.z.string(),
+ output: import_v417.z.array(
+ import_v417.z.discriminatedUnion("type", [
+ import_v417.z.object({
+ type: import_v417.z.literal("message"),
+ role: import_v417.z.literal("assistant"),
+ id: import_v417.z.string(),
+ content: import_v417.z.array(
+ import_v417.z.object({
+ type: import_v417.z.literal("output_text"),
+ text: import_v417.z.string(),
  logprobs: LOGPROBS_SCHEMA.nullish(),
- annotations: import_v416.z.array(
- import_v416.z.discriminatedUnion("type", [
- import_v416.z.object({
- type: import_v416.z.literal("url_citation"),
- start_index: import_v416.z.number(),
- end_index: import_v416.z.number(),
- url: import_v416.z.string(),
- title: import_v416.z.string()
+ annotations: import_v417.z.array(
+ import_v417.z.discriminatedUnion("type", [
+ import_v417.z.object({
+ type: import_v417.z.literal("url_citation"),
+ start_index: import_v417.z.number(),
+ end_index: import_v417.z.number(),
+ url: import_v417.z.string(),
+ title: import_v417.z.string()
  }),
- import_v416.z.object({
- type: import_v416.z.literal("file_citation"),
- file_id: import_v416.z.string(),
- filename: import_v416.z.string().nullish(),
- index: import_v416.z.number().nullish(),
- start_index: import_v416.z.number().nullish(),
- end_index: import_v416.z.number().nullish(),
- quote: import_v416.z.string().nullish()
+ import_v417.z.object({
+ type: import_v417.z.literal("file_citation"),
+ file_id: import_v417.z.string(),
+ filename: import_v417.z.string().nullish(),
+ index: import_v417.z.number().nullish(),
+ start_index: import_v417.z.number().nullish(),
+ end_index: import_v417.z.number().nullish(),
+ quote: import_v417.z.string().nullish()
  })
  ])
  )
  })
  )
  }),
- import_v416.z.object({
- type: import_v416.z.literal("function_call"),
- call_id: import_v416.z.string(),
- name: import_v416.z.string(),
- arguments: import_v416.z.string(),
- id: import_v416.z.string()
+ import_v417.z.object({
+ type: import_v417.z.literal("function_call"),
+ call_id: import_v417.z.string(),
+ name: import_v417.z.string(),
+ arguments: import_v417.z.string(),
+ id: import_v417.z.string()
  }),
  webSearchCallItem,
- import_v416.z.object({
- type: import_v416.z.literal("computer_call"),
- id: import_v416.z.string(),
- status: import_v416.z.string().optional()
+ import_v417.z.object({
+ type: import_v417.z.literal("computer_call"),
+ id: import_v417.z.string(),
+ status: import_v417.z.string().optional()
  }),
- import_v416.z.object({
- type: import_v416.z.literal("file_search_call"),
- id: import_v416.z.string(),
- status: import_v416.z.string().optional(),
- queries: import_v416.z.array(import_v416.z.string()).nullish(),
- results: import_v416.z.array(
- import_v416.z.object({
- attributes: import_v416.z.object({
- file_id: import_v416.z.string(),
- filename: import_v416.z.string(),
- score: import_v416.z.number(),
- text: import_v416.z.string()
+ import_v417.z.object({
+ type: import_v417.z.literal("file_search_call"),
+ id: import_v417.z.string(),
+ status: import_v417.z.string().optional(),
+ queries: import_v417.z.array(import_v417.z.string()).nullish(),
+ results: import_v417.z.array(
+ import_v417.z.object({
+ attributes: import_v417.z.object({
+ file_id: import_v417.z.string(),
+ filename: import_v417.z.string(),
+ score: import_v417.z.number(),
+ text: import_v417.z.string()
  })
  })
  ).nullish()
  }),
- import_v416.z.object({
- type: import_v416.z.literal("reasoning"),
- id: import_v416.z.string(),
- encrypted_content: import_v416.z.string().nullish(),
- summary: import_v416.z.array(
- import_v416.z.object({
- type: import_v416.z.literal("summary_text"),
- text: import_v416.z.string()
+ import_v417.z.object({
+ type: import_v417.z.literal("reasoning"),
+ id: import_v417.z.string(),
+ encrypted_content: import_v417.z.string().nullish(),
+ summary: import_v417.z.array(
+ import_v417.z.object({
+ type: import_v417.z.literal("summary_text"),
+ text: import_v417.z.string()
  })
  )
  })
  ])
  ),
- service_tier: import_v416.z.string().nullish(),
- incomplete_details: import_v416.z.object({ reason: import_v416.z.string() }).nullable(),
+ service_tier: import_v417.z.string().nullish(),
+ incomplete_details: import_v417.z.object({ reason: import_v417.z.string() }).nullable(),
  usage: usageSchema2
  })
  ),
@@ -2809,6 +2863,7 @@ var OpenAIResponsesLanguageModel = class {
  }
  const content = [];
  const logprobs = [];
+ let hasFunctionCall = false;
  for (const part of response.output) {
  switch (part.type) {
  case "reasoning": {
@@ -2848,7 +2903,7 @@ var OpenAIResponsesLanguageModel = class {
  content.push({
  type: "source",
  sourceType: "url",
- id: (_f = (_e = (_d = this.config).generateId) == null ? void 0 : _e.call(_d)) != null ? _f : (0, import_provider_utils14.generateId)(),
+ id: (_f = (_e = (_d = this.config).generateId) == null ? void 0 : _e.call(_d)) != null ? _f : (0, import_provider_utils15.generateId)(),
  url: annotation.url,
  title: annotation.title
  });
@@ -2856,7 +2911,7 @@ var OpenAIResponsesLanguageModel = class {
  content.push({
  type: "source",
  sourceType: "document",
- id: (_i = (_h = (_g = this.config).generateId) == null ? void 0 : _h.call(_g)) != null ? _i : (0, import_provider_utils14.generateId)(),
+ id: (_i = (_h = (_g = this.config).generateId) == null ? void 0 : _h.call(_g)) != null ? _i : (0, import_provider_utils15.generateId)(),
  mediaType: "text/plain",
  title: (_k = (_j = annotation.quote) != null ? _j : annotation.filename) != null ? _k : "Document",
  filename: (_l = annotation.filename) != null ? _l : annotation.file_id
@@ -2867,6 +2922,7 @@ var OpenAIResponsesLanguageModel = class {
  break;
  }
  case "function_call": {
+ hasFunctionCall = true;
  content.push({
  type: "tool-call",
  toolCallId: part.call_id,
@@ -2954,7 +3010,7 @@ var OpenAIResponsesLanguageModel = class {
  content,
  finishReason: mapOpenAIResponseFinishReason({
  finishReason: (_m = response.incomplete_details) == null ? void 0 : _m.reason,
- hasToolCalls: content.some((part) => part.type === "tool-call")
+ hasFunctionCall
  }),
  usage: {
  inputTokens: response.usage.input_tokens,
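Together with the `hasFunctionCall` bookkeeping introduced in the hunks above, this changes how the finish reason is derived in `doGenerate`: only `function_call` output items now count as tool calls, so provider-executed calls such as `web_search_call` no longer force the finish reason to `"tool-calls"` (the streaming hunks below make the equivalent change). An illustrative before/after comparison (not package code):

```ts
// A response whose only tool activity is a provider-executed web search.
const content = [
  { type: "tool-call", toolName: "web_search", providerExecuted: true },
  { type: "text", text: "..." },
];

// 2.0.24: any tool-call content part flipped the finish reason.
const before = content.some((part) => part.type === "tool-call")
  ? "tool-calls"
  : "stop"; // -> "tool-calls"

// 2.0.26: only function_call items in response.output set hasFunctionCall.
const hasFunctionCall = false; // no function_call item in this example
const after = hasFunctionCall ? "tool-calls" : "stop"; // -> "stop"
```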
@@ -2977,18 +3033,18 @@ var OpenAIResponsesLanguageModel = class {
  }
  async doStream(options) {
  const { args: body, warnings } = await this.getArgs(options);
- const { responseHeaders, value: response } = await (0, import_provider_utils14.postJsonToApi)({
+ const { responseHeaders, value: response } = await (0, import_provider_utils15.postJsonToApi)({
  url: this.config.url({
  path: "/responses",
  modelId: this.modelId
  }),
- headers: (0, import_provider_utils14.combineHeaders)(this.config.headers(), options.headers),
+ headers: (0, import_provider_utils15.combineHeaders)(this.config.headers(), options.headers),
  body: {
  ...body,
  stream: true
  },
  failedResponseHandler: openaiFailedResponseHandler,
- successfulResponseHandler: (0, import_provider_utils14.createEventSourceResponseHandler)(
+ successfulResponseHandler: (0, import_provider_utils15.createEventSourceResponseHandler)(
  openaiResponsesChunkSchema
  ),
  abortSignal: options.abortSignal,
@@ -3004,7 +3060,7 @@ var OpenAIResponsesLanguageModel = class {
  const logprobs = [];
  let responseId = null;
  const ongoingToolCalls = {};
- let hasToolCalls = false;
+ let hasFunctionCall = false;
  const activeReasoning = {};
  let serviceTier;
  return {
@@ -3094,7 +3150,7 @@ var OpenAIResponsesLanguageModel = class {
  } else if (isResponseOutputItemDoneChunk(value)) {
  if (value.item.type === "function_call") {
  ongoingToolCalls[value.output_index] = void 0;
- hasToolCalls = true;
+ hasFunctionCall = true;
  controller.enqueue({
  type: "tool-input-end",
  id: value.item.call_id
@@ -3112,7 +3168,6 @@ var OpenAIResponsesLanguageModel = class {
  });
  } else if (value.item.type === "web_search_call") {
  ongoingToolCalls[value.output_index] = void 0;
- hasToolCalls = true;
  controller.enqueue({
  type: "tool-input-end",
  id: value.item.id
@@ -3120,20 +3175,19 @@ var OpenAIResponsesLanguageModel = class {
  controller.enqueue({
  type: "tool-call",
  toolCallId: value.item.id,
- toolName: "web_search_preview",
+ toolName: "web_search",
  input: JSON.stringify({ action: value.item.action }),
  providerExecuted: true
  });
  controller.enqueue({
  type: "tool-result",
  toolCallId: value.item.id,
- toolName: "web_search_preview",
+ toolName: "web_search",
  result: { status: value.item.status },
  providerExecuted: true
  });
  } else if (value.item.type === "computer_call") {
  ongoingToolCalls[value.output_index] = void 0;
- hasToolCalls = true;
  controller.enqueue({
  type: "tool-input-end",
  id: value.item.id
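In the stream, completed `web_search_call` items are now surfaced under the tool name `web_search` instead of `web_search_preview`. A sketch of the two parts a stream consumer would now receive for such an item (ids and values invented for illustration):

```ts
// tool-call part emitted when a web_search_call output item finishes:
const toolCallPart = {
  type: "tool-call",
  toolCallId: "ws_123", // value.item.id (invented here)
  toolName: "web_search", // was "web_search_preview" in 2.0.24
  input: JSON.stringify({ action: { type: "search", query: "example query" } }),
  providerExecuted: true,
};

// ...followed by the matching tool-result part:
const toolResultPart = {
  type: "tool-result",
  toolCallId: "ws_123",
  toolName: "web_search",
  result: { status: "completed" },
  providerExecuted: true,
};
```

Note also that `web_search_call`, `computer_call`, and `file_search_call` items no longer set the flag that maps the finish reason to `"tool-calls"`; only `function_call` items do.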
@@ -3157,7 +3211,6 @@ var OpenAIResponsesLanguageModel = class {
  });
  } else if (value.item.type === "file_search_call") {
  ongoingToolCalls[value.output_index] = void 0;
- hasToolCalls = true;
  controller.enqueue({
  type: "tool-input-end",
  id: value.item.id
@@ -3258,7 +3311,7 @@ var OpenAIResponsesLanguageModel = class {
  } else if (isResponseFinishedChunk(value)) {
  finishReason = mapOpenAIResponseFinishReason({
  finishReason: (_h = value.response.incomplete_details) == null ? void 0 : _h.reason,
- hasToolCalls
+ hasFunctionCall
  });
  usage.inputTokens = value.response.usage.input_tokens;
  usage.outputTokens = value.response.usage.output_tokens;
@@ -3273,7 +3326,7 @@ var OpenAIResponsesLanguageModel = class {
  controller.enqueue({
  type: "source",
  sourceType: "url",
- id: (_o = (_n = (_m = self.config).generateId) == null ? void 0 : _n.call(_m)) != null ? _o : (0, import_provider_utils14.generateId)(),
+ id: (_o = (_n = (_m = self.config).generateId) == null ? void 0 : _n.call(_m)) != null ? _o : (0, import_provider_utils15.generateId)(),
  url: value.annotation.url,
  title: value.annotation.title
  });
@@ -3281,7 +3334,7 @@ var OpenAIResponsesLanguageModel = class {
  controller.enqueue({
  type: "source",
  sourceType: "document",
- id: (_r = (_q = (_p = self.config).generateId) == null ? void 0 : _q.call(_p)) != null ? _r : (0, import_provider_utils14.generateId)(),
+ id: (_r = (_q = (_p = self.config).generateId) == null ? void 0 : _q.call(_p)) != null ? _r : (0, import_provider_utils15.generateId)(),
  mediaType: "text/plain",
  title: (_t = (_s = value.annotation.quote) != null ? _s : value.annotation.filename) != null ? _t : "Document",
  filename: (_u = value.annotation.filename) != null ? _u : value.annotation.file_id
@@ -3317,176 +3370,176 @@ var OpenAIResponsesLanguageModel = class {
  };
  }
  };
- var usageSchema2 = import_v416.z.object({
- input_tokens: import_v416.z.number(),
- input_tokens_details: import_v416.z.object({ cached_tokens: import_v416.z.number().nullish() }).nullish(),
- output_tokens: import_v416.z.number(),
- output_tokens_details: import_v416.z.object({ reasoning_tokens: import_v416.z.number().nullish() }).nullish()
+ var usageSchema2 = import_v417.z.object({
+ input_tokens: import_v417.z.number(),
+ input_tokens_details: import_v417.z.object({ cached_tokens: import_v417.z.number().nullish() }).nullish(),
+ output_tokens: import_v417.z.number(),
+ output_tokens_details: import_v417.z.object({ reasoning_tokens: import_v417.z.number().nullish() }).nullish()
  });
- var textDeltaChunkSchema = import_v416.z.object({
- type: import_v416.z.literal("response.output_text.delta"),
- item_id: import_v416.z.string(),
- delta: import_v416.z.string(),
+ var textDeltaChunkSchema = import_v417.z.object({
+ type: import_v417.z.literal("response.output_text.delta"),
+ item_id: import_v417.z.string(),
+ delta: import_v417.z.string(),
  logprobs: LOGPROBS_SCHEMA.nullish()
  });
- var errorChunkSchema = import_v416.z.object({
- type: import_v416.z.literal("error"),
- code: import_v416.z.string(),
- message: import_v416.z.string(),
- param: import_v416.z.string().nullish(),
- sequence_number: import_v416.z.number()
+ var errorChunkSchema = import_v417.z.object({
+ type: import_v417.z.literal("error"),
+ code: import_v417.z.string(),
+ message: import_v417.z.string(),
+ param: import_v417.z.string().nullish(),
+ sequence_number: import_v417.z.number()
  });
- var responseFinishedChunkSchema = import_v416.z.object({
- type: import_v416.z.enum(["response.completed", "response.incomplete"]),
- response: import_v416.z.object({
- incomplete_details: import_v416.z.object({ reason: import_v416.z.string() }).nullish(),
+ var responseFinishedChunkSchema = import_v417.z.object({
+ type: import_v417.z.enum(["response.completed", "response.incomplete"]),
+ response: import_v417.z.object({
+ incomplete_details: import_v417.z.object({ reason: import_v417.z.string() }).nullish(),
  usage: usageSchema2,
- service_tier: import_v416.z.string().nullish()
+ service_tier: import_v417.z.string().nullish()
  })
  });
- var responseCreatedChunkSchema = import_v416.z.object({
- type: import_v416.z.literal("response.created"),
- response: import_v416.z.object({
- id: import_v416.z.string(),
- created_at: import_v416.z.number(),
- model: import_v416.z.string(),
- service_tier: import_v416.z.string().nullish()
+ var responseCreatedChunkSchema = import_v417.z.object({
+ type: import_v417.z.literal("response.created"),
+ response: import_v417.z.object({
+ id: import_v417.z.string(),
+ created_at: import_v417.z.number(),
+ model: import_v417.z.string(),
+ service_tier: import_v417.z.string().nullish()
  })
  });
- var responseOutputItemAddedSchema = import_v416.z.object({
- type: import_v416.z.literal("response.output_item.added"),
- output_index: import_v416.z.number(),
- item: import_v416.z.discriminatedUnion("type", [
- import_v416.z.object({
- type: import_v416.z.literal("message"),
- id: import_v416.z.string()
+ var responseOutputItemAddedSchema = import_v417.z.object({
+ type: import_v417.z.literal("response.output_item.added"),
+ output_index: import_v417.z.number(),
+ item: import_v417.z.discriminatedUnion("type", [
+ import_v417.z.object({
+ type: import_v417.z.literal("message"),
+ id: import_v417.z.string()
  }),
- import_v416.z.object({
- type: import_v416.z.literal("reasoning"),
- id: import_v416.z.string(),
- encrypted_content: import_v416.z.string().nullish()
+ import_v417.z.object({
+ type: import_v417.z.literal("reasoning"),
+ id: import_v417.z.string(),
+ encrypted_content: import_v417.z.string().nullish()
  }),
- import_v416.z.object({
- type: import_v416.z.literal("function_call"),
- id: import_v416.z.string(),
- call_id: import_v416.z.string(),
- name: import_v416.z.string(),
- arguments: import_v416.z.string()
+ import_v417.z.object({
+ type: import_v417.z.literal("function_call"),
+ id: import_v417.z.string(),
+ call_id: import_v417.z.string(),
+ name: import_v417.z.string(),
+ arguments: import_v417.z.string()
  }),
- import_v416.z.object({
- type: import_v416.z.literal("web_search_call"),
- id: import_v416.z.string(),
- status: import_v416.z.string(),
- action: import_v416.z.object({
- type: import_v416.z.literal("search"),
- query: import_v416.z.string().optional()
+ import_v417.z.object({
+ type: import_v417.z.literal("web_search_call"),
+ id: import_v417.z.string(),
+ status: import_v417.z.string(),
+ action: import_v417.z.object({
+ type: import_v417.z.literal("search"),
+ query: import_v417.z.string().optional()
  }).nullish()
  }),
- import_v416.z.object({
- type: import_v416.z.literal("computer_call"),
- id: import_v416.z.string(),
- status: import_v416.z.string()
+ import_v417.z.object({
+ type: import_v417.z.literal("computer_call"),
+ id: import_v417.z.string(),
+ status: import_v417.z.string()
  }),
- import_v416.z.object({
- type: import_v416.z.literal("file_search_call"),
- id: import_v416.z.string(),
- status: import_v416.z.string(),
- queries: import_v416.z.array(import_v416.z.string()).nullish(),
- results: import_v416.z.array(
- import_v416.z.object({
- attributes: import_v416.z.object({
- file_id: import_v416.z.string(),
- filename: import_v416.z.string(),
- score: import_v416.z.number(),
- text: import_v416.z.string()
+ import_v417.z.object({
+ type: import_v417.z.literal("file_search_call"),
+ id: import_v417.z.string(),
+ status: import_v417.z.string(),
+ queries: import_v417.z.array(import_v417.z.string()).nullish(),
+ results: import_v417.z.array(
+ import_v417.z.object({
+ attributes: import_v417.z.object({
+ file_id: import_v417.z.string(),
+ filename: import_v417.z.string(),
+ score: import_v417.z.number(),
+ text: import_v417.z.string()
  })
  })
  ).optional()
  })
  ])
  });
- var responseOutputItemDoneSchema = import_v416.z.object({
- type: import_v416.z.literal("response.output_item.done"),
- output_index: import_v416.z.number(),
- item: import_v416.z.discriminatedUnion("type", [
- import_v416.z.object({
- type: import_v416.z.literal("message"),
- id: import_v416.z.string()
+ var responseOutputItemDoneSchema = import_v417.z.object({
+ type: import_v417.z.literal("response.output_item.done"),
+ output_index: import_v417.z.number(),
+ item: import_v417.z.discriminatedUnion("type", [
+ import_v417.z.object({
+ type: import_v417.z.literal("message"),
+ id: import_v417.z.string()
  }),
- import_v416.z.object({
- type: import_v416.z.literal("reasoning"),
- id: import_v416.z.string(),
- encrypted_content: import_v416.z.string().nullish()
+ import_v417.z.object({
+ type: import_v417.z.literal("reasoning"),
+ id: import_v417.z.string(),
+ encrypted_content: import_v417.z.string().nullish()
  }),
- import_v416.z.object({
- type: import_v416.z.literal("function_call"),
- id: import_v416.z.string(),
- call_id: import_v416.z.string(),
- name: import_v416.z.string(),
- arguments: import_v416.z.string(),
- status: import_v416.z.literal("completed")
+ import_v417.z.object({
+ type: import_v417.z.literal("function_call"),
+ id: import_v417.z.string(),
+ call_id: import_v417.z.string(),
+ name: import_v417.z.string(),
+ arguments: import_v417.z.string(),
+ status: import_v417.z.literal("completed")
  }),
  webSearchCallItem,
- import_v416.z.object({
- type: import_v416.z.literal("computer_call"),
- id: import_v416.z.string(),
- status: import_v416.z.literal("completed")
+ import_v417.z.object({
+ type: import_v417.z.literal("computer_call"),
+ id: import_v417.z.string(),
+ status: import_v417.z.literal("completed")
  }),
- import_v416.z.object({
- type: import_v416.z.literal("file_search_call"),
- id: import_v416.z.string(),
- status: import_v416.z.literal("completed"),
- queries: import_v416.z.array(import_v416.z.string()).nullish(),
- results: import_v416.z.array(
- import_v416.z.object({
- attributes: import_v416.z.object({
- file_id: import_v416.z.string(),
- filename: import_v416.z.string(),
- score: import_v416.z.number(),
- text: import_v416.z.string()
+ import_v417.z.object({
+ type: import_v417.z.literal("file_search_call"),
+ id: import_v417.z.string(),
+ status: import_v417.z.literal("completed"),
+ queries: import_v417.z.array(import_v417.z.string()).nullish(),
+ results: import_v417.z.array(
+ import_v417.z.object({
+ attributes: import_v417.z.object({
+ file_id: import_v417.z.string(),
+ filename: import_v417.z.string(),
+ score: import_v417.z.number(),
+ text: import_v417.z.string()
  })
  })
  ).nullish()
  })
  ])
  });
- var responseFunctionCallArgumentsDeltaSchema = import_v416.z.object({
- type: import_v416.z.literal("response.function_call_arguments.delta"),
- item_id: import_v416.z.string(),
- output_index: import_v416.z.number(),
- delta: import_v416.z.string()
+ var responseFunctionCallArgumentsDeltaSchema = import_v417.z.object({
+ type: import_v417.z.literal("response.function_call_arguments.delta"),
+ item_id: import_v417.z.string(),
+ output_index: import_v417.z.number(),
+ delta: import_v417.z.string()
  });
- var responseAnnotationAddedSchema = import_v416.z.object({
- type: import_v416.z.literal("response.output_text.annotation.added"),
- annotation: import_v416.z.discriminatedUnion("type", [
- import_v416.z.object({
- type: import_v416.z.literal("url_citation"),
- url: import_v416.z.string(),
- title: import_v416.z.string()
+ var responseAnnotationAddedSchema = import_v417.z.object({
+ type: import_v417.z.literal("response.output_text.annotation.added"),
+ annotation: import_v417.z.discriminatedUnion("type", [
+ import_v417.z.object({
+ type: import_v417.z.literal("url_citation"),
+ url: import_v417.z.string(),
+ title: import_v417.z.string()
  }),
- import_v416.z.object({
- type: import_v416.z.literal("file_citation"),
- file_id: import_v416.z.string(),
- filename: import_v416.z.string().nullish(),
- index: import_v416.z.number().nullish(),
- start_index: import_v416.z.number().nullish(),
- end_index: import_v416.z.number().nullish(),
- quote: import_v416.z.string().nullish()
+ import_v417.z.object({
+ type: import_v417.z.literal("file_citation"),
+ file_id: import_v417.z.string(),
+ filename: import_v417.z.string().nullish(),
+ index: import_v417.z.number().nullish(),
+ start_index: import_v417.z.number().nullish(),
+ end_index: import_v417.z.number().nullish(),
+ quote: import_v417.z.string().nullish()
  })
  ])
  });
- var responseReasoningSummaryPartAddedSchema = import_v416.z.object({
- type: import_v416.z.literal("response.reasoning_summary_part.added"),
- item_id: import_v416.z.string(),
- summary_index: import_v416.z.number()
+ var responseReasoningSummaryPartAddedSchema = import_v417.z.object({
+ type: import_v417.z.literal("response.reasoning_summary_part.added"),
+ item_id: import_v417.z.string(),
+ summary_index: import_v417.z.number()
  });
- var responseReasoningSummaryTextDeltaSchema = import_v416.z.object({
- type: import_v416.z.literal("response.reasoning_summary_text.delta"),
- item_id: import_v416.z.string(),
- summary_index: import_v416.z.number(),
- delta: import_v416.z.string()
+ var responseReasoningSummaryTextDeltaSchema = import_v417.z.object({
+ type: import_v417.z.literal("response.reasoning_summary_text.delta"),
+ item_id: import_v417.z.string(),
+ summary_index: import_v417.z.number(),
+ delta: import_v417.z.string()
  });
- var openaiResponsesChunkSchema = import_v416.z.union([
+ var openaiResponsesChunkSchema = import_v417.z.union([
  textDeltaChunkSchema,
  responseFinishedChunkSchema,
  responseCreatedChunkSchema,
@@ -3497,7 +3550,7 @@ var openaiResponsesChunkSchema = import_v416.z.union([
  responseReasoningSummaryPartAddedSchema,
  responseReasoningSummaryTextDeltaSchema,
  errorChunkSchema,
- import_v416.z.object({ type: import_v416.z.string() }).loose()
+ import_v417.z.object({ type: import_v417.z.string() }).loose()
  // fallback for unknown chunks
  ]);
  function isTextDeltaChunk(chunk) {
@@ -3570,27 +3623,27 @@ function getResponsesModelConfig(modelId) {
  isReasoningModel: false
  };
  }
- var openaiResponsesProviderOptionsSchema = import_v416.z.object({
- metadata: import_v416.z.any().nullish(),
- parallelToolCalls: import_v416.z.boolean().nullish(),
- previousResponseId: import_v416.z.string().nullish(),
- store: import_v416.z.boolean().nullish(),
- user: import_v416.z.string().nullish(),
- reasoningEffort: import_v416.z.string().nullish(),
- strictJsonSchema: import_v416.z.boolean().nullish(),
- instructions: import_v416.z.string().nullish(),
- reasoningSummary: import_v416.z.string().nullish(),
- serviceTier: import_v416.z.enum(["auto", "flex", "priority"]).nullish(),
- include: import_v416.z.array(
- import_v416.z.enum([
+ var openaiResponsesProviderOptionsSchema = import_v417.z.object({
+ metadata: import_v417.z.any().nullish(),
+ parallelToolCalls: import_v417.z.boolean().nullish(),
+ previousResponseId: import_v417.z.string().nullish(),
+ store: import_v417.z.boolean().nullish(),
+ user: import_v417.z.string().nullish(),
+ reasoningEffort: import_v417.z.string().nullish(),
+ strictJsonSchema: import_v417.z.boolean().nullish(),
+ instructions: import_v417.z.string().nullish(),
+ reasoningSummary: import_v417.z.string().nullish(),
+ serviceTier: import_v417.z.enum(["auto", "flex", "priority"]).nullish(),
+ include: import_v417.z.array(
+ import_v417.z.enum([
  "reasoning.encrypted_content",
  "file_search_call.results",
  "message.output_text.logprobs"
  ])
  ).nullish(),
- textVerbosity: import_v416.z.enum(["low", "medium", "high"]).nullish(),
- promptCacheKey: import_v416.z.string().nullish(),
- safetyIdentifier: import_v416.z.string().nullish(),
+ textVerbosity: import_v417.z.enum(["low", "medium", "high"]).nullish(),
+ promptCacheKey: import_v417.z.string().nullish(),
+ safetyIdentifier: import_v417.z.string().nullish(),
  /**
  * Return the log probabilities of the tokens.
  *
@@ -3603,7 +3656,7 @@ var openaiResponsesProviderOptionsSchema = import_v416.z.object({
  * @see https://platform.openai.com/docs/api-reference/responses/create
  * @see https://cookbook.openai.com/examples/using_logprobs
  */
- logprobs: import_v416.z.union([import_v416.z.boolean(), import_v416.z.number().min(1).max(TOP_LOGPROBS_MAX)]).optional()
+ logprobs: import_v417.z.union([import_v417.z.boolean(), import_v417.z.number().min(1).max(TOP_LOGPROBS_MAX)]).optional()
  });
  // Annotate the CommonJS export names for ESM import in node:
  0 && (module.exports = {