@ai-sdk/openai 2.0.27 → 2.0.28

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -2391,7 +2391,7 @@ var codeInterpreterArgsSchema = z15.object({
     })
   ]).optional()
 });
-var codeInterpreter = createProviderDefinedToolFactory3({
+var codeInterpreterToolFactory = createProviderDefinedToolFactory3({
   id: "openai.code_interpreter",
   name: "code_interpreter",
   inputSchema: z15.object({})
@@ -2413,7 +2413,7 @@ var webSearchArgsSchema = z16.object({
     timezone: z16.string().optional()
   }).optional()
 });
-var factory = createProviderDefinedToolFactory4({
+var webSearchToolFactory = createProviderDefinedToolFactory4({
   id: "openai.web_search",
   name: "web_search",
   inputSchema: z16.object({
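Both factory variables are only renamed here; they are still registered under the same ids (`openai.code_interpreter`, `openai.web_search`), so nothing changes for callers. For context, a rough sketch of how these provider-defined tools are typically wired up from application code (the model id and tool option shapes are assumptions, not taken from this diff):

```ts
import { generateText } from 'ai';
import { openai } from '@ai-sdk/openai';

const result = await generateText({
  model: openai.responses('gpt-4o-mini'),
  tools: {
    // built by the renamed codeInterpreterToolFactory (id "openai.code_interpreter")
    code_interpreter: openai.tools.codeInterpreter({}),
    // built by the renamed webSearchToolFactory (id "openai.web_search");
    // the record key is what later surfaces as toolName in tool-call parts
    web_search: openai.tools.webSearch({}),
  },
  prompt: 'Find and summarize the latest @ai-sdk/openai release notes.',
});
```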
@@ -2596,7 +2596,7 @@ var OpenAIResponsesLanguageModel = class {
     toolChoice,
     responseFormat
   }) {
-    var _a, _b;
+    var _a, _b, _c;
     const warnings = [];
     const modelConfig = getResponsesModelConfig(this.modelId);
     if (topK != null) {
@@ -2632,8 +2632,13 @@ var OpenAIResponsesLanguageModel = class {
       schema: openaiResponsesProviderOptionsSchema
     });
     const strictJsonSchema = (_a = openaiOptions == null ? void 0 : openaiOptions.strictJsonSchema) != null ? _a : false;
+    let include = openaiOptions == null ? void 0 : openaiOptions.include;
     const topLogprobs = typeof (openaiOptions == null ? void 0 : openaiOptions.logprobs) === "number" ? openaiOptions == null ? void 0 : openaiOptions.logprobs : (openaiOptions == null ? void 0 : openaiOptions.logprobs) === true ? TOP_LOGPROBS_MAX : void 0;
-    const openaiOptionsInclude = topLogprobs ? Array.isArray(openaiOptions == null ? void 0 : openaiOptions.include) ? [...openaiOptions == null ? void 0 : openaiOptions.include, "message.output_text.logprobs"] : ["message.output_text.logprobs"] : openaiOptions == null ? void 0 : openaiOptions.include;
+    include = topLogprobs ? Array.isArray(include) ? [...include, "message.output_text.logprobs"] : ["message.output_text.logprobs"] : include;
+    const webSearchToolName = (_b = tools == null ? void 0 : tools.find(
+      (tool) => tool.type === "provider-defined" && (tool.id === "openai.web_search" || tool.id === "openai.web_search_preview")
+    )) == null ? void 0 : _b.name;
+    include = webSearchToolName ? Array.isArray(include) ? [...include, "web_search_call.action.sources"] : ["web_search_call.action.sources"] : include;
     const baseArgs = {
       model: this.modelId,
       input: messages,
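The minified ternaries are easier to follow written out. A sketch of the equivalent logic (helper name and types are illustrative, not the package's internals): the caller-supplied `providerOptions.openai.include` is the base, `message.output_text.logprobs` is appended when logprobs were requested, and `web_search_call.action.sources` is appended when a web search tool (`openai.web_search` or `openai.web_search_preview`) is configured; that tool's user-facing name is also captured as `webSearchToolName` for later reuse.

```ts
// Readable restatement of the new include handling above (a sketch, not the literal source).
function resolveInclude(
  baseInclude: string[] | undefined,
  topLogprobs: number | undefined,
  webSearchToolName: string | undefined,
): string[] | undefined {
  let include = baseInclude;
  // truthy check mirrors the bundled ternary: logprobs requested -> request them in the output
  if (topLogprobs) {
    include = Array.isArray(include)
      ? [...include, 'message.output_text.logprobs']
      : ['message.output_text.logprobs'];
  }
  // a configured web search tool additionally pulls in the call's sources
  if (webSearchToolName) {
    include = Array.isArray(include)
      ? [...include, 'web_search_call.action.sources']
      : ['web_search_call.action.sources'];
  }
  return include;
}
```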
@@ -2646,7 +2651,7 @@ var OpenAIResponsesLanguageModel = class {
         format: responseFormat.schema != null ? {
           type: "json_schema",
           strict: strictJsonSchema,
-          name: (_b = responseFormat.name) != null ? _b : "response",
+          name: (_c = responseFormat.name) != null ? _c : "response",
           description: responseFormat.description,
           schema: responseFormat.schema
         } : { type: "json_object" }
@@ -2664,7 +2669,7 @@ var OpenAIResponsesLanguageModel = class {
       user: openaiOptions == null ? void 0 : openaiOptions.user,
       instructions: openaiOptions == null ? void 0 : openaiOptions.instructions,
       service_tier: openaiOptions == null ? void 0 : openaiOptions.serviceTier,
-      include: openaiOptionsInclude,
+      include,
       prompt_cache_key: openaiOptions == null ? void 0 : openaiOptions.promptCacheKey,
       safety_identifier: openaiOptions == null ? void 0 : openaiOptions.safetyIdentifier,
       top_logprobs: topLogprobs,
@@ -2742,6 +2747,7 @@ var OpenAIResponsesLanguageModel = class {
       strictJsonSchema
     });
     return {
+      webSearchToolName,
      args: {
        ...baseArgs,
        tools: openaiTools,
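`getArgs` therefore resolves to one extra field, which both `doGenerate` and `doStream` destructure further down. A simplified, assumed shape of the resolved value (the real types live in the package):

```ts
interface ResponsesGetArgsResult {
  args: Record<string, unknown>;            // request body for POST /responses
  warnings: Array<Record<string, unknown>>; // roughly LanguageModelV2CallWarning[]
  webSearchToolName: string | undefined;    // new in 2.0.28
}
```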
@@ -2752,7 +2758,11 @@ var OpenAIResponsesLanguageModel = class {
   }
   async doGenerate(options) {
     var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p, _q;
-    const { args: body, warnings } = await this.getArgs(options);
+    const {
+      args: body,
+      warnings,
+      webSearchToolName
+    } = await this.getArgs(options);
     const url = this.config.url({
       path: "/responses",
       modelId: this.modelId
@@ -2803,12 +2813,18 @@ var OpenAIResponsesLanguageModel = class {
                 start_index: z17.number().nullish(),
                 end_index: z17.number().nullish(),
                 quote: z17.string().nullish()
+              }),
+              z17.object({
+                type: z17.literal("container_file_citation")
               })
             ])
           )
         })
       )
     }),
+    z17.object({
+      type: z17.literal("code_interpreter_call")
+    }),
     z17.object({
       type: z17.literal("function_call"),
       call_id: z17.string(),
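Two response shapes are newly tolerated by the parsing schema: a `container_file_citation` annotation on output text and a top-level `code_interpreter_call` output item. Both are matched by their `type` literal only, so whatever additional fields OpenAI sends are ignored rather than causing validation failures. A trimmed-down sketch of the pattern (plain zod, with most sibling fields omitted for brevity):

```ts
import { z } from 'zod';

// Annotation variants on output text; the container_file_citation branch is new in 2.0.28,
// so citations of code-interpreter container files no longer fail response parsing.
const annotationSchema = z.union([
  z.object({ type: z.literal('url_citation'), url: z.string() }),
  z.object({ type: z.literal('file_citation'), quote: z.string().nullish() }),
  z.object({ type: z.literal('container_file_citation') }),
]);

// Output item variants; code_interpreter_call items are now recognized (by type only).
const outputItemSchema = z.union([
  z.object({ type: z.literal('message') }),
  z.object({ type: z.literal('code_interpreter_call') }),
  z.object({ type: z.literal('function_call'), call_id: z.string() }),
]);
```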
@@ -2949,14 +2965,14 @@ var OpenAIResponsesLanguageModel = class {
           content.push({
             type: "tool-call",
             toolCallId: part.id,
-            toolName: "web_search_preview",
+            toolName: webSearchToolName != null ? webSearchToolName : "web_search",
             input: JSON.stringify({ action: part.action }),
             providerExecuted: true
           });
           content.push({
             type: "tool-result",
             toolCallId: part.id,
-            toolName: "web_search_preview",
+            toolName: webSearchToolName != null ? webSearchToolName : "web_search",
             result: { status: part.status },
             providerExecuted: true
           });
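The tool-call and tool-result parts emitted for provider-executed web searches now carry whatever name the caller registered the web search tool under, falling back to `web_search`, instead of the previously hard-coded `web_search_preview`. A hypothetical example of what a consumer sees after this change, assuming the tool was registered under the key `web_search` (field values are illustrative):

```ts
const toolCallPart = {
  type: 'tool-call',
  toolCallId: 'ws_abc123',
  toolName: 'web_search', // was always 'web_search_preview' before 2.0.28
  input: JSON.stringify({ action: { type: 'search', query: 'ai sdk changelog' } }),
  providerExecuted: true,
} as const;
```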
@@ -3041,7 +3057,11 @@ var OpenAIResponsesLanguageModel = class {
     };
   }
   async doStream(options) {
-    const { args: body, warnings } = await this.getArgs(options);
+    const {
+      args: body,
+      warnings,
+      webSearchToolName
+    } = await this.getArgs(options);
     const { responseHeaders, value: response } = await postJsonToApi6({
       url: this.config.url({
         path: "/responses",
@@ -3102,13 +3122,13 @@ var OpenAIResponsesLanguageModel = class {
             });
           } else if (value.item.type === "web_search_call") {
             ongoingToolCalls[value.output_index] = {
-              toolName: "web_search_preview",
+              toolName: webSearchToolName != null ? webSearchToolName : "web_search",
              toolCallId: value.item.id
            };
            controller.enqueue({
              type: "tool-input-start",
              id: value.item.id,
-              toolName: "web_search_preview"
+              toolName: webSearchToolName != null ? webSearchToolName : "web_search"
            });
          } else if (value.item.type === "computer_call") {
            ongoingToolCalls[value.output_index] = {