@ai-sdk/openai 2.0.24 → 2.0.26

This diff shows the changes between two publicly released versions of the package, as they appear in their public registry. It is provided for informational purposes only.
@@ -1955,7 +1955,13 @@ var OpenAITranscriptionModel = class {
  };
  for (const [key, value] of Object.entries(transcriptionModelOptions)) {
  if (value != null) {
- formData.append(key, String(value));
+ if (Array.isArray(value)) {
+ for (const item of value) {
+ formData.append(`${key}[]`, String(item));
+ }
+ } else {
+ formData.append(key, String(value));
+ }
  }
  }
  }
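The transcription change above affects how array-valued provider options are serialized into the multipart request. A minimal sketch of the new behavior, using `timestamp_granularities` purely as an illustrative array-valued option:

```ts
// Sketch of the serialization behavior introduced above (illustrative values only):
// array options now become repeated `key[]` form fields instead of a single
// string produced by String(value).
const transcriptionModelOptions: Record<string, unknown> = {
  temperature: 0,
  timestamp_granularities: ["word", "segment"], // assumed array-valued option
};

const formData = new FormData();
for (const [key, value] of Object.entries(transcriptionModelOptions)) {
  if (value != null) {
    if (Array.isArray(value)) {
      for (const item of value) {
        formData.append(`${key}[]`, String(item)); // timestamp_granularities[]=word, ...[]=segment
      }
    } else {
      formData.append(key, String(value));
    }
  }
}
```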
@@ -2161,7 +2167,7 @@ import {
  parseProviderOptions as parseProviderOptions7,
  postJsonToApi as postJsonToApi6
  } from "@ai-sdk/provider-utils";
- import { z as z16 } from "zod/v4";
+ import { z as z17 } from "zod/v4";

  // src/responses/convert-to-openai-responses-messages.ts
  import {
@@ -2366,18 +2372,18 @@ var openaiResponsesReasoningProviderOptionsSchema = z14.object({
  // src/responses/map-openai-responses-finish-reason.ts
  function mapOpenAIResponseFinishReason({
  finishReason,
- hasToolCalls
+ hasFunctionCall
  }) {
  switch (finishReason) {
  case void 0:
  case null:
- return hasToolCalls ? "tool-calls" : "stop";
+ return hasFunctionCall ? "tool-calls" : "stop";
  case "max_output_tokens":
  return "length";
  case "content_filter":
  return "content-filter";
  default:
- return hasToolCalls ? "tool-calls" : "unknown";
+ return hasFunctionCall ? "tool-calls" : "unknown";
  }
  }

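The finish-reason mapper now keys off `hasFunctionCall` (client-executed `function_call` items) rather than any tool call. A standalone sketch of the resulting mapping, not the library's exported API:

```ts
// Sketch of the mapping after this change: only client-side function calls
// flip the finish reason to "tool-calls"; provider-executed tools do not.
type FinishReason = "stop" | "tool-calls" | "length" | "content-filter" | "unknown";

function mapFinishReason(
  finishReason: string | null | undefined,
  hasFunctionCall: boolean,
): FinishReason {
  switch (finishReason) {
    case undefined:
    case null:
      return hasFunctionCall ? "tool-calls" : "stop";
    case "max_output_tokens":
      return "length";
    case "content_filter":
      return "content-filter";
    default:
      return hasFunctionCall ? "tool-calls" : "unknown";
  }
}

mapFinishReason(undefined, false); // "stop" (e.g. a turn that only ran a built-in web search)
mapFinishReason(undefined, true);  // "tool-calls" (the model emitted a function_call item)
```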
@@ -2403,6 +2409,44 @@ var codeInterpreter = createProviderDefinedToolFactory3({
  inputSchema: z15.object({})
  });

+ // src/tool/web-search.ts
+ import { createProviderDefinedToolFactory as createProviderDefinedToolFactory4 } from "@ai-sdk/provider-utils";
+ import { z as z16 } from "zod/v4";
+ var webSearchArgsSchema = z16.object({
+ filters: z16.object({
+ allowedDomains: z16.array(z16.string()).optional()
+ }).optional(),
+ searchContextSize: z16.enum(["low", "medium", "high"]).optional(),
+ userLocation: z16.object({
+ type: z16.literal("approximate"),
+ country: z16.string().optional(),
+ city: z16.string().optional(),
+ region: z16.string().optional(),
+ timezone: z16.string().optional()
+ }).optional()
+ });
+ var factory = createProviderDefinedToolFactory4({
+ id: "openai.web_search",
+ name: "web_search",
+ inputSchema: z16.object({
+ action: z16.discriminatedUnion("type", [
+ z16.object({
+ type: z16.literal("search"),
+ query: z16.string().nullish()
+ }),
+ z16.object({
+ type: z16.literal("open_page"),
+ url: z16.string()
+ }),
+ z16.object({
+ type: z16.literal("find"),
+ url: z16.string(),
+ pattern: z16.string()
+ })
+ ]).nullish()
+ })
+ });
+
  // src/responses/openai-responses-prepare-tools.ts
  function prepareResponsesTools({
  tools,
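The new `src/tool/web-search.ts` factory registers a provider-defined `web_search` tool for the Responses API. A hedged usage sketch, assuming the factory is exposed as `openai.tools.webSearch` (mirroring the existing `openai.tools.webSearchPreview`); the model id is a placeholder:

```ts
import { openai } from "@ai-sdk/openai";
import { generateText } from "ai";

const result = await generateText({
  model: openai.responses("gpt-4.1-mini"), // placeholder model id
  prompt: "What changed in the latest AI SDK release?",
  tools: {
    // assumed export name; args mirror webSearchArgsSchema above
    web_search: openai.tools.webSearch({
      searchContextSize: "medium",
      filters: { allowedDomains: ["vercel.com", "github.com"] },
      userLocation: { type: "approximate", country: "US", city: "San Francisco" },
    }),
  },
});

console.log(result.text);
```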
@@ -2448,6 +2492,16 @@ function prepareResponsesTools({
  });
  break;
  }
+ case "openai.web_search": {
+ const args = webSearchArgsSchema.parse(tool.args);
+ openaiTools.push({
+ type: "web_search",
+ filters: args.filters != null ? { allowed_domains: args.filters.allowedDomains } : void 0,
+ search_context_size: args.searchContextSize,
+ user_location: args.userLocation
+ });
+ break;
+ }
  case "openai.code_interpreter": {
  const args = codeInterpreterArgsSchema.parse(tool.args);
  openaiTools.push({
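In `prepareResponsesTools`, the camelCase tool args are translated into the snake_case `web_search` entry of the Responses API request body. A small sketch of that translation with assumed values:

```ts
// Mirrors the mapping above: allowedDomains -> allowed_domains,
// searchContextSize -> search_context_size, userLocation -> user_location.
const args = {
  filters: { allowedDomains: ["example.com"] },
  searchContextSize: "high" as const,
  userLocation: { type: "approximate" as const, country: "DE" },
};

const requestTool = {
  type: "web_search",
  filters: args.filters != null ? { allowed_domains: args.filters.allowedDomains } : undefined,
  search_context_size: args.searchContextSize,
  user_location: args.userLocation,
};
// => { type: "web_search", filters: { allowed_domains: ["example.com"] },
//      search_context_size: "high", user_location: { type: "approximate", country: "DE" } }
```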
@@ -2480,7 +2534,7 @@ function prepareResponsesTools({
  case "tool":
  return {
  tools: openaiTools,
- toolChoice: toolChoice.toolName === "code_interpreter" || toolChoice.toolName === "file_search" || toolChoice.toolName === "web_search_preview" ? { type: toolChoice.toolName } : { type: "function", name: toolChoice.toolName },
+ toolChoice: toolChoice.toolName === "code_interpreter" || toolChoice.toolName === "file_search" || toolChoice.toolName === "web_search_preview" || toolChoice.toolName === "web_search" ? { type: toolChoice.toolName } : { type: "function", name: toolChoice.toolName },
  toolWarnings
  };
  default: {
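`web_search` also joins the provider-executed tools that can be forced via `toolChoice`; it maps to `{ type: "web_search" }` rather than a function tool choice. A hedged sketch, reusing the assumed `openai.tools.webSearch` export from above:

```ts
import { openai } from "@ai-sdk/openai";
import { generateText } from "ai";

const result = await generateText({
  model: openai.responses("gpt-4.1-mini"), // placeholder model id
  prompt: "Search the web for the current Node.js LTS version.",
  tools: { web_search: openai.tools.webSearch({}) }, // assumed export name
  toolChoice: { type: "tool", toolName: "web_search" }, // sent as { type: "web_search" }
});
```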
@@ -2493,35 +2547,35 @@ function prepareResponsesTools({
  }

  // src/responses/openai-responses-language-model.ts
- var webSearchCallItem = z16.object({
- type: z16.literal("web_search_call"),
- id: z16.string(),
- status: z16.string(),
- action: z16.discriminatedUnion("type", [
- z16.object({
- type: z16.literal("search"),
- query: z16.string().nullish()
+ var webSearchCallItem = z17.object({
+ type: z17.literal("web_search_call"),
+ id: z17.string(),
+ status: z17.string(),
+ action: z17.discriminatedUnion("type", [
+ z17.object({
+ type: z17.literal("search"),
+ query: z17.string().nullish()
  }),
- z16.object({
- type: z16.literal("open_page"),
- url: z16.string()
+ z17.object({
+ type: z17.literal("open_page"),
+ url: z17.string()
  }),
- z16.object({
- type: z16.literal("find"),
- url: z16.string(),
- pattern: z16.string()
+ z17.object({
+ type: z17.literal("find"),
+ url: z17.string(),
+ pattern: z17.string()
  })
  ]).nullish()
  });
  var TOP_LOGPROBS_MAX = 20;
- var LOGPROBS_SCHEMA = z16.array(
- z16.object({
- token: z16.string(),
- logprob: z16.number(),
- top_logprobs: z16.array(
- z16.object({
- token: z16.string(),
- logprob: z16.number()
+ var LOGPROBS_SCHEMA = z17.array(
+ z17.object({
+ token: z17.string(),
+ logprob: z17.number(),
+ top_logprobs: z17.array(
+ z17.object({
+ token: z17.string(),
+ logprob: z17.number()
  })
  )
  })
@@ -2725,92 +2779,92 @@ var OpenAIResponsesLanguageModel = class {
  body,
  failedResponseHandler: openaiFailedResponseHandler,
  successfulResponseHandler: createJsonResponseHandler6(
- z16.object({
- id: z16.string(),
- created_at: z16.number(),
- error: z16.object({
- code: z16.string(),
- message: z16.string()
+ z17.object({
+ id: z17.string(),
+ created_at: z17.number(),
+ error: z17.object({
+ code: z17.string(),
+ message: z17.string()
  }).nullish(),
- model: z16.string(),
- output: z16.array(
- z16.discriminatedUnion("type", [
- z16.object({
- type: z16.literal("message"),
- role: z16.literal("assistant"),
- id: z16.string(),
- content: z16.array(
- z16.object({
- type: z16.literal("output_text"),
- text: z16.string(),
+ model: z17.string(),
+ output: z17.array(
+ z17.discriminatedUnion("type", [
+ z17.object({
+ type: z17.literal("message"),
+ role: z17.literal("assistant"),
+ id: z17.string(),
+ content: z17.array(
+ z17.object({
+ type: z17.literal("output_text"),
+ text: z17.string(),
  logprobs: LOGPROBS_SCHEMA.nullish(),
- annotations: z16.array(
- z16.discriminatedUnion("type", [
- z16.object({
- type: z16.literal("url_citation"),
- start_index: z16.number(),
- end_index: z16.number(),
- url: z16.string(),
- title: z16.string()
+ annotations: z17.array(
+ z17.discriminatedUnion("type", [
+ z17.object({
+ type: z17.literal("url_citation"),
+ start_index: z17.number(),
+ end_index: z17.number(),
+ url: z17.string(),
+ title: z17.string()
  }),
- z16.object({
- type: z16.literal("file_citation"),
- file_id: z16.string(),
- filename: z16.string().nullish(),
- index: z16.number().nullish(),
- start_index: z16.number().nullish(),
- end_index: z16.number().nullish(),
- quote: z16.string().nullish()
+ z17.object({
+ type: z17.literal("file_citation"),
+ file_id: z17.string(),
+ filename: z17.string().nullish(),
+ index: z17.number().nullish(),
+ start_index: z17.number().nullish(),
+ end_index: z17.number().nullish(),
+ quote: z17.string().nullish()
  })
  ])
  )
  })
  )
  }),
- z16.object({
- type: z16.literal("function_call"),
- call_id: z16.string(),
- name: z16.string(),
- arguments: z16.string(),
- id: z16.string()
+ z17.object({
+ type: z17.literal("function_call"),
+ call_id: z17.string(),
+ name: z17.string(),
+ arguments: z17.string(),
+ id: z17.string()
  }),
  webSearchCallItem,
- z16.object({
- type: z16.literal("computer_call"),
- id: z16.string(),
- status: z16.string().optional()
+ z17.object({
+ type: z17.literal("computer_call"),
+ id: z17.string(),
+ status: z17.string().optional()
  }),
- z16.object({
- type: z16.literal("file_search_call"),
- id: z16.string(),
- status: z16.string().optional(),
- queries: z16.array(z16.string()).nullish(),
- results: z16.array(
- z16.object({
- attributes: z16.object({
- file_id: z16.string(),
- filename: z16.string(),
- score: z16.number(),
- text: z16.string()
+ z17.object({
+ type: z17.literal("file_search_call"),
+ id: z17.string(),
+ status: z17.string().optional(),
+ queries: z17.array(z17.string()).nullish(),
+ results: z17.array(
+ z17.object({
+ attributes: z17.object({
+ file_id: z17.string(),
+ filename: z17.string(),
+ score: z17.number(),
+ text: z17.string()
  })
  })
  ).nullish()
  }),
- z16.object({
- type: z16.literal("reasoning"),
- id: z16.string(),
- encrypted_content: z16.string().nullish(),
- summary: z16.array(
- z16.object({
- type: z16.literal("summary_text"),
- text: z16.string()
+ z17.object({
+ type: z17.literal("reasoning"),
+ id: z17.string(),
+ encrypted_content: z17.string().nullish(),
+ summary: z17.array(
+ z17.object({
+ type: z17.literal("summary_text"),
+ text: z17.string()
  })
  )
  })
  ])
  ),
- service_tier: z16.string().nullish(),
- incomplete_details: z16.object({ reason: z16.string() }).nullable(),
+ service_tier: z17.string().nullish(),
+ incomplete_details: z17.object({ reason: z17.string() }).nullable(),
  usage: usageSchema2
  })
  ),
@@ -2830,6 +2884,7 @@ var OpenAIResponsesLanguageModel = class {
  }
  const content = [];
  const logprobs = [];
+ let hasFunctionCall = false;
  for (const part of response.output) {
  switch (part.type) {
  case "reasoning": {
@@ -2888,6 +2943,7 @@ var OpenAIResponsesLanguageModel = class {
  break;
  }
  case "function_call": {
+ hasFunctionCall = true;
  content.push({
  type: "tool-call",
  toolCallId: part.call_id,
@@ -2975,7 +3031,7 @@ var OpenAIResponsesLanguageModel = class {
  content,
  finishReason: mapOpenAIResponseFinishReason({
  finishReason: (_m = response.incomplete_details) == null ? void 0 : _m.reason,
- hasToolCalls: content.some((part) => part.type === "tool-call")
+ hasFunctionCall
  }),
  usage: {
  inputTokens: response.usage.input_tokens,
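Because `doGenerate` now tracks `hasFunctionCall` instead of inspecting all tool-call content, a response whose only tool activity is a provider-executed call (web search, file search, computer use) reports `finishReason: 'stop'` rather than `'tool-calls'`. A hedged sketch of the observable effect, reusing the assumed `openai.tools.webSearch` export:

```ts
import { openai } from "@ai-sdk/openai";
import { generateText } from "ai";

const { finishReason } = await generateText({
  model: openai.responses("gpt-4.1-mini"), // placeholder model id
  prompt: "Look up today's top story and summarize it.",
  tools: { web_search: openai.tools.webSearch({}) }, // assumed export name
});

// Expected with 2.0.26: "stop" even though a provider-executed web_search ran;
// 2.0.24 would have reported "tool-calls" in the same situation.
console.log(finishReason);
```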
@@ -3025,7 +3081,7 @@ var OpenAIResponsesLanguageModel = class {
  const logprobs = [];
  let responseId = null;
  const ongoingToolCalls = {};
- let hasToolCalls = false;
+ let hasFunctionCall = false;
  const activeReasoning = {};
  let serviceTier;
  return {
@@ -3115,7 +3171,7 @@ var OpenAIResponsesLanguageModel = class {
  } else if (isResponseOutputItemDoneChunk(value)) {
  if (value.item.type === "function_call") {
  ongoingToolCalls[value.output_index] = void 0;
- hasToolCalls = true;
+ hasFunctionCall = true;
  controller.enqueue({
  type: "tool-input-end",
  id: value.item.call_id
@@ -3133,7 +3189,6 @@ var OpenAIResponsesLanguageModel = class {
  });
  } else if (value.item.type === "web_search_call") {
  ongoingToolCalls[value.output_index] = void 0;
- hasToolCalls = true;
  controller.enqueue({
  type: "tool-input-end",
  id: value.item.id
@@ -3141,20 +3196,19 @@ var OpenAIResponsesLanguageModel = class {
  controller.enqueue({
  type: "tool-call",
  toolCallId: value.item.id,
- toolName: "web_search_preview",
+ toolName: "web_search",
  input: JSON.stringify({ action: value.item.action }),
  providerExecuted: true
  });
  controller.enqueue({
  type: "tool-result",
  toolCallId: value.item.id,
- toolName: "web_search_preview",
+ toolName: "web_search",
  result: { status: value.item.status },
  providerExecuted: true
  });
  } else if (value.item.type === "computer_call") {
  ongoingToolCalls[value.output_index] = void 0;
- hasToolCalls = true;
  controller.enqueue({
  type: "tool-input-end",
  id: value.item.id
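In streaming, completed `web_search_call` items are now emitted with `toolName: 'web_search'` (previously `'web_search_preview'`), and they no longer set the function-call flag. A hedged sketch of consuming those stream parts; the exact part shapes are assumptions:

```ts
import { openai } from "@ai-sdk/openai";
import { streamText } from "ai";

const result = streamText({
  model: openai.responses("gpt-4.1-mini"), // placeholder model id
  prompt: "Find the current weather in Berlin.",
  tools: { web_search: openai.tools.webSearch({}) }, // assumed export name
});

for await (const part of result.fullStream) {
  // toolName is now "web_search" for provider-executed web search calls
  if (part.type === "tool-call" && part.toolName === "web_search") {
    console.log("web search call:", part.toolCallId);
  }
  if (part.type === "tool-result" && part.toolName === "web_search") {
    console.log("web search result:", part.output); // e.g. { status: "completed" }
  }
}
```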
@@ -3178,7 +3232,6 @@ var OpenAIResponsesLanguageModel = class {
  });
  } else if (value.item.type === "file_search_call") {
  ongoingToolCalls[value.output_index] = void 0;
- hasToolCalls = true;
  controller.enqueue({
  type: "tool-input-end",
  id: value.item.id
@@ -3279,7 +3332,7 @@ var OpenAIResponsesLanguageModel = class {
  } else if (isResponseFinishedChunk(value)) {
  finishReason = mapOpenAIResponseFinishReason({
  finishReason: (_h = value.response.incomplete_details) == null ? void 0 : _h.reason,
- hasToolCalls
+ hasFunctionCall
  });
  usage.inputTokens = value.response.usage.input_tokens;
  usage.outputTokens = value.response.usage.output_tokens;
@@ -3338,176 +3391,176 @@ var OpenAIResponsesLanguageModel = class {
  };
  }
  };
- var usageSchema2 = z16.object({
- input_tokens: z16.number(),
- input_tokens_details: z16.object({ cached_tokens: z16.number().nullish() }).nullish(),
- output_tokens: z16.number(),
- output_tokens_details: z16.object({ reasoning_tokens: z16.number().nullish() }).nullish()
+ var usageSchema2 = z17.object({
+ input_tokens: z17.number(),
+ input_tokens_details: z17.object({ cached_tokens: z17.number().nullish() }).nullish(),
+ output_tokens: z17.number(),
+ output_tokens_details: z17.object({ reasoning_tokens: z17.number().nullish() }).nullish()
  });
- var textDeltaChunkSchema = z16.object({
- type: z16.literal("response.output_text.delta"),
- item_id: z16.string(),
- delta: z16.string(),
+ var textDeltaChunkSchema = z17.object({
+ type: z17.literal("response.output_text.delta"),
+ item_id: z17.string(),
+ delta: z17.string(),
  logprobs: LOGPROBS_SCHEMA.nullish()
  });
- var errorChunkSchema = z16.object({
- type: z16.literal("error"),
- code: z16.string(),
- message: z16.string(),
- param: z16.string().nullish(),
- sequence_number: z16.number()
+ var errorChunkSchema = z17.object({
+ type: z17.literal("error"),
+ code: z17.string(),
+ message: z17.string(),
+ param: z17.string().nullish(),
+ sequence_number: z17.number()
  });
- var responseFinishedChunkSchema = z16.object({
- type: z16.enum(["response.completed", "response.incomplete"]),
- response: z16.object({
- incomplete_details: z16.object({ reason: z16.string() }).nullish(),
+ var responseFinishedChunkSchema = z17.object({
+ type: z17.enum(["response.completed", "response.incomplete"]),
+ response: z17.object({
+ incomplete_details: z17.object({ reason: z17.string() }).nullish(),
  usage: usageSchema2,
- service_tier: z16.string().nullish()
+ service_tier: z17.string().nullish()
  })
  });
- var responseCreatedChunkSchema = z16.object({
- type: z16.literal("response.created"),
- response: z16.object({
- id: z16.string(),
- created_at: z16.number(),
- model: z16.string(),
- service_tier: z16.string().nullish()
+ var responseCreatedChunkSchema = z17.object({
+ type: z17.literal("response.created"),
+ response: z17.object({
+ id: z17.string(),
+ created_at: z17.number(),
+ model: z17.string(),
+ service_tier: z17.string().nullish()
  })
  });
- var responseOutputItemAddedSchema = z16.object({
- type: z16.literal("response.output_item.added"),
- output_index: z16.number(),
- item: z16.discriminatedUnion("type", [
- z16.object({
- type: z16.literal("message"),
- id: z16.string()
+ var responseOutputItemAddedSchema = z17.object({
+ type: z17.literal("response.output_item.added"),
+ output_index: z17.number(),
+ item: z17.discriminatedUnion("type", [
+ z17.object({
+ type: z17.literal("message"),
+ id: z17.string()
  }),
- z16.object({
- type: z16.literal("reasoning"),
- id: z16.string(),
- encrypted_content: z16.string().nullish()
+ z17.object({
+ type: z17.literal("reasoning"),
+ id: z17.string(),
+ encrypted_content: z17.string().nullish()
  }),
- z16.object({
- type: z16.literal("function_call"),
- id: z16.string(),
- call_id: z16.string(),
- name: z16.string(),
- arguments: z16.string()
+ z17.object({
+ type: z17.literal("function_call"),
+ id: z17.string(),
+ call_id: z17.string(),
+ name: z17.string(),
+ arguments: z17.string()
  }),
- z16.object({
- type: z16.literal("web_search_call"),
- id: z16.string(),
- status: z16.string(),
- action: z16.object({
- type: z16.literal("search"),
- query: z16.string().optional()
+ z17.object({
+ type: z17.literal("web_search_call"),
+ id: z17.string(),
+ status: z17.string(),
+ action: z17.object({
+ type: z17.literal("search"),
+ query: z17.string().optional()
  }).nullish()
  }),
- z16.object({
- type: z16.literal("computer_call"),
- id: z16.string(),
- status: z16.string()
+ z17.object({
+ type: z17.literal("computer_call"),
+ id: z17.string(),
+ status: z17.string()
  }),
- z16.object({
- type: z16.literal("file_search_call"),
- id: z16.string(),
- status: z16.string(),
- queries: z16.array(z16.string()).nullish(),
- results: z16.array(
- z16.object({
- attributes: z16.object({
- file_id: z16.string(),
- filename: z16.string(),
- score: z16.number(),
- text: z16.string()
+ z17.object({
+ type: z17.literal("file_search_call"),
+ id: z17.string(),
+ status: z17.string(),
+ queries: z17.array(z17.string()).nullish(),
+ results: z17.array(
+ z17.object({
+ attributes: z17.object({
+ file_id: z17.string(),
+ filename: z17.string(),
+ score: z17.number(),
+ text: z17.string()
  })
  })
  ).optional()
  })
  ])
  });
- var responseOutputItemDoneSchema = z16.object({
- type: z16.literal("response.output_item.done"),
- output_index: z16.number(),
- item: z16.discriminatedUnion("type", [
- z16.object({
- type: z16.literal("message"),
- id: z16.string()
+ var responseOutputItemDoneSchema = z17.object({
+ type: z17.literal("response.output_item.done"),
+ output_index: z17.number(),
+ item: z17.discriminatedUnion("type", [
+ z17.object({
+ type: z17.literal("message"),
+ id: z17.string()
  }),
- z16.object({
- type: z16.literal("reasoning"),
- id: z16.string(),
- encrypted_content: z16.string().nullish()
+ z17.object({
+ type: z17.literal("reasoning"),
+ id: z17.string(),
+ encrypted_content: z17.string().nullish()
  }),
- z16.object({
- type: z16.literal("function_call"),
- id: z16.string(),
- call_id: z16.string(),
- name: z16.string(),
- arguments: z16.string(),
- status: z16.literal("completed")
+ z17.object({
+ type: z17.literal("function_call"),
+ id: z17.string(),
+ call_id: z17.string(),
+ name: z17.string(),
+ arguments: z17.string(),
+ status: z17.literal("completed")
  }),
  webSearchCallItem,
- z16.object({
- type: z16.literal("computer_call"),
- id: z16.string(),
- status: z16.literal("completed")
+ z17.object({
+ type: z17.literal("computer_call"),
+ id: z17.string(),
+ status: z17.literal("completed")
  }),
- z16.object({
- type: z16.literal("file_search_call"),
- id: z16.string(),
- status: z16.literal("completed"),
- queries: z16.array(z16.string()).nullish(),
- results: z16.array(
- z16.object({
- attributes: z16.object({
- file_id: z16.string(),
- filename: z16.string(),
- score: z16.number(),
- text: z16.string()
+ z17.object({
+ type: z17.literal("file_search_call"),
+ id: z17.string(),
+ status: z17.literal("completed"),
+ queries: z17.array(z17.string()).nullish(),
+ results: z17.array(
+ z17.object({
+ attributes: z17.object({
+ file_id: z17.string(),
+ filename: z17.string(),
+ score: z17.number(),
+ text: z17.string()
  })
  })
  ).nullish()
  })
  ])
  });
- var responseFunctionCallArgumentsDeltaSchema = z16.object({
- type: z16.literal("response.function_call_arguments.delta"),
- item_id: z16.string(),
- output_index: z16.number(),
- delta: z16.string()
+ var responseFunctionCallArgumentsDeltaSchema = z17.object({
+ type: z17.literal("response.function_call_arguments.delta"),
+ item_id: z17.string(),
+ output_index: z17.number(),
+ delta: z17.string()
  });
- var responseAnnotationAddedSchema = z16.object({
- type: z16.literal("response.output_text.annotation.added"),
- annotation: z16.discriminatedUnion("type", [
- z16.object({
- type: z16.literal("url_citation"),
- url: z16.string(),
- title: z16.string()
+ var responseAnnotationAddedSchema = z17.object({
+ type: z17.literal("response.output_text.annotation.added"),
+ annotation: z17.discriminatedUnion("type", [
+ z17.object({
+ type: z17.literal("url_citation"),
+ url: z17.string(),
+ title: z17.string()
  }),
- z16.object({
- type: z16.literal("file_citation"),
- file_id: z16.string(),
- filename: z16.string().nullish(),
- index: z16.number().nullish(),
- start_index: z16.number().nullish(),
- end_index: z16.number().nullish(),
- quote: z16.string().nullish()
+ z17.object({
+ type: z17.literal("file_citation"),
+ file_id: z17.string(),
+ filename: z17.string().nullish(),
+ index: z17.number().nullish(),
+ start_index: z17.number().nullish(),
+ end_index: z17.number().nullish(),
+ quote: z17.string().nullish()
  })
  ])
  });
- var responseReasoningSummaryPartAddedSchema = z16.object({
- type: z16.literal("response.reasoning_summary_part.added"),
- item_id: z16.string(),
- summary_index: z16.number()
+ var responseReasoningSummaryPartAddedSchema = z17.object({
+ type: z17.literal("response.reasoning_summary_part.added"),
+ item_id: z17.string(),
+ summary_index: z17.number()
  });
- var responseReasoningSummaryTextDeltaSchema = z16.object({
- type: z16.literal("response.reasoning_summary_text.delta"),
- item_id: z16.string(),
- summary_index: z16.number(),
- delta: z16.string()
+ var responseReasoningSummaryTextDeltaSchema = z17.object({
+ type: z17.literal("response.reasoning_summary_text.delta"),
+ item_id: z17.string(),
+ summary_index: z17.number(),
+ delta: z17.string()
  });
- var openaiResponsesChunkSchema = z16.union([
+ var openaiResponsesChunkSchema = z17.union([
  textDeltaChunkSchema,
  responseFinishedChunkSchema,
  responseCreatedChunkSchema,
@@ -3518,7 +3571,7 @@ var openaiResponsesChunkSchema = z16.union([
  responseReasoningSummaryPartAddedSchema,
  responseReasoningSummaryTextDeltaSchema,
  errorChunkSchema,
- z16.object({ type: z16.string() }).loose()
+ z17.object({ type: z17.string() }).loose()
  // fallback for unknown chunks
  ]);
  function isTextDeltaChunk(chunk) {
@@ -3591,27 +3644,27 @@ function getResponsesModelConfig(modelId) {
  isReasoningModel: false
  };
  }
- var openaiResponsesProviderOptionsSchema = z16.object({
- metadata: z16.any().nullish(),
- parallelToolCalls: z16.boolean().nullish(),
- previousResponseId: z16.string().nullish(),
- store: z16.boolean().nullish(),
- user: z16.string().nullish(),
- reasoningEffort: z16.string().nullish(),
- strictJsonSchema: z16.boolean().nullish(),
- instructions: z16.string().nullish(),
- reasoningSummary: z16.string().nullish(),
- serviceTier: z16.enum(["auto", "flex", "priority"]).nullish(),
- include: z16.array(
- z16.enum([
+ var openaiResponsesProviderOptionsSchema = z17.object({
+ metadata: z17.any().nullish(),
+ parallelToolCalls: z17.boolean().nullish(),
+ previousResponseId: z17.string().nullish(),
+ store: z17.boolean().nullish(),
+ user: z17.string().nullish(),
+ reasoningEffort: z17.string().nullish(),
+ strictJsonSchema: z17.boolean().nullish(),
+ instructions: z17.string().nullish(),
+ reasoningSummary: z17.string().nullish(),
+ serviceTier: z17.enum(["auto", "flex", "priority"]).nullish(),
+ include: z17.array(
+ z17.enum([
  "reasoning.encrypted_content",
  "file_search_call.results",
  "message.output_text.logprobs"
  ])
  ).nullish(),
- textVerbosity: z16.enum(["low", "medium", "high"]).nullish(),
- promptCacheKey: z16.string().nullish(),
- safetyIdentifier: z16.string().nullish(),
+ textVerbosity: z17.enum(["low", "medium", "high"]).nullish(),
+ promptCacheKey: z17.string().nullish(),
+ safetyIdentifier: z17.string().nullish(),
  /**
  * Return the log probabilities of the tokens.
  *
@@ -3624,7 +3677,7 @@ var openaiResponsesProviderOptionsSchema = z16.object({
  * @see https://platform.openai.com/docs/api-reference/responses/create
  * @see https://cookbook.openai.com/examples/using_logprobs
  */
- logprobs: z16.union([z16.boolean(), z16.number().min(1).max(TOP_LOGPROBS_MAX)]).optional()
+ logprobs: z17.union([z17.boolean(), z17.number().min(1).max(TOP_LOGPROBS_MAX)]).optional()
  });
  export {
  OpenAIChatLanguageModel,