@ai-sdk/openai 2.0.26 → 2.0.28

This diff shows the changes between publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.
@@ -231,9 +231,9 @@ declare const openaiResponsesProviderOptionsSchema: z.ZodObject<{
  priority: "priority";
  }>>>;
  include: z.ZodOptional<z.ZodNullable<z.ZodArray<z.ZodEnum<{
- "reasoning.encrypted_content": "reasoning.encrypted_content";
  "file_search_call.results": "file_search_call.results";
  "message.output_text.logprobs": "message.output_text.logprobs";
+ "reasoning.encrypted_content": "reasoning.encrypted_content";
  }>>>>;
  textVerbosity: z.ZodOptional<z.ZodNullable<z.ZodEnum<{
  low: "low";
@@ -377,23 +377,11 @@ var compoundFilterSchema = import_v43.z.object({
  });
  var filtersSchema = import_v43.z.union([comparisonFilterSchema, compoundFilterSchema]);
  var fileSearchArgsSchema = import_v43.z.object({
- /**
- * List of vector store IDs to search through. If not provided, searches all available vector stores.
- */
  vectorStoreIds: import_v43.z.array(import_v43.z.string()).optional(),
- /**
- * Maximum number of search results to return. Defaults to 10.
- */
  maxNumResults: import_v43.z.number().optional(),
- /**
- * Ranking options for the search.
- */
  ranking: import_v43.z.object({
  ranker: import_v43.z.enum(["auto", "default-2024-08-21"]).optional()
  }).optional(),
- /**
- * A filter to apply based on file attributes.
- */
  filters: filtersSchema.optional()
  });
  var fileSearch = (0, import_provider_utils3.createProviderDefinedToolFactory)({
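
The lines removed here are JSDoc comments that no longer survive bundling; fileSearchArgsSchema itself is unchanged. A hedged sketch of configuring the file search tool built from this factory, assuming the provider's openai.tools.fileSearch helper (the vector store id is a placeholder):

    import { generateText } from "ai";
    import { openai } from "@ai-sdk/openai";

    const result = await generateText({
      model: openai.responses("gpt-5-mini"),
      prompt: "What does the handbook say about remote work?",
      tools: {
        // Fields mirror fileSearchArgsSchema; all of them are optional.
        file_search: openai.tools.fileSearch({
          vectorStoreIds: ["vs_0000000000"], // placeholder id
          maxNumResults: 5,
          ranking: { ranker: "auto" },
        }),
      },
    });
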
@@ -2382,7 +2370,7 @@ var codeInterpreterArgsSchema = import_v415.z.object({
  })
  ]).optional()
  });
- var codeInterpreter = (0, import_provider_utils13.createProviderDefinedToolFactory)({
+ var codeInterpreterToolFactory = (0, import_provider_utils13.createProviderDefinedToolFactory)({
  id: "openai.code_interpreter",
  name: "code_interpreter",
  inputSchema: import_v415.z.object({})
@@ -2404,7 +2392,7 @@ var webSearchArgsSchema = import_v416.z.object({
  timezone: import_v416.z.string().optional()
  }).optional()
  });
- var factory = (0, import_provider_utils14.createProviderDefinedToolFactory)({
+ var webSearchToolFactory = (0, import_provider_utils14.createProviderDefinedToolFactory)({
  id: "openai.web_search",
  name: "web_search",
  inputSchema: import_v416.z.object({
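
This hunk and the previous one rename internal factory variables (codeInterpreter → codeInterpreterToolFactory, factory → webSearchToolFactory); the tool ids and names ("openai.code_interpreter" / "openai.web_search") are untouched, so existing usage keeps working. A hedged example, assuming the openai.tools helpers exported by the package:

    import { generateText } from "ai";
    import { openai } from "@ai-sdk/openai";

    const result = await generateText({
      model: openai.responses("gpt-5-mini"),
      prompt: "Plot the last ten years of Berlin's population.",
      tools: {
        // Only internal variable names changed; these entry points are stable.
        web_search: openai.tools.webSearch({}),
        code_interpreter: openai.tools.codeInterpreter({}),
      },
    });
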
@@ -2587,7 +2575,7 @@ var OpenAIResponsesLanguageModel = class {
  toolChoice,
  responseFormat
  }) {
- var _a, _b;
+ var _a, _b, _c;
  const warnings = [];
  const modelConfig = getResponsesModelConfig(this.modelId);
  if (topK != null) {
@@ -2623,8 +2611,13 @@ var OpenAIResponsesLanguageModel = class {
  schema: openaiResponsesProviderOptionsSchema
  });
  const strictJsonSchema = (_a = openaiOptions == null ? void 0 : openaiOptions.strictJsonSchema) != null ? _a : false;
+ let include = openaiOptions == null ? void 0 : openaiOptions.include;
  const topLogprobs = typeof (openaiOptions == null ? void 0 : openaiOptions.logprobs) === "number" ? openaiOptions == null ? void 0 : openaiOptions.logprobs : (openaiOptions == null ? void 0 : openaiOptions.logprobs) === true ? TOP_LOGPROBS_MAX : void 0;
- const openaiOptionsInclude = topLogprobs ? Array.isArray(openaiOptions == null ? void 0 : openaiOptions.include) ? [...openaiOptions == null ? void 0 : openaiOptions.include, "message.output_text.logprobs"] : ["message.output_text.logprobs"] : openaiOptions == null ? void 0 : openaiOptions.include;
+ include = topLogprobs ? Array.isArray(include) ? [...include, "message.output_text.logprobs"] : ["message.output_text.logprobs"] : include;
+ const webSearchToolName = (_b = tools == null ? void 0 : tools.find(
+ (tool) => tool.type === "provider-defined" && (tool.id === "openai.web_search" || tool.id === "openai.web_search_preview")
+ )) == null ? void 0 : _b.name;
+ include = webSearchToolName ? Array.isArray(include) ? [...include, "web_search_call.action.sources"] : ["web_search_call.action.sources"] : include;
  const baseArgs = {
  model: this.modelId,
  input: messages,
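
The compiled ternaries above are dense; restated as a hedged TypeScript sketch (function and parameter names are illustrative, the behavior mirrors the diff): the include list is now built in two steps, and the new second step requests web_search_call.action.sources whenever a web search tool is configured.

    // Illustrative restatement of the include handling added in this release.
    function resolveInclude(
      requested: string[] | null | undefined,
      topLogprobs: number | undefined,
      webSearchToolName: string | undefined,
    ): string[] | null | undefined {
      let include = requested;
      // Logprobs were requested via the logprobs provider option.
      if (topLogprobs) {
        include = Array.isArray(include)
          ? [...include, "message.output_text.logprobs"]
          : ["message.output_text.logprobs"];
      }
      // New: a web_search / web_search_preview tool is configured,
      // so also ask the Responses API for the action sources.
      if (webSearchToolName) {
        include = Array.isArray(include)
          ? [...include, "web_search_call.action.sources"]
          : ["web_search_call.action.sources"];
      }
      return include;
    }
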
@@ -2637,7 +2630,7 @@ var OpenAIResponsesLanguageModel = class {
  format: responseFormat.schema != null ? {
  type: "json_schema",
  strict: strictJsonSchema,
- name: (_b = responseFormat.name) != null ? _b : "response",
+ name: (_c = responseFormat.name) != null ? _c : "response",
  description: responseFormat.description,
  schema: responseFormat.schema
  } : { type: "json_object" }
@@ -2655,7 +2648,7 @@ var OpenAIResponsesLanguageModel = class {
  user: openaiOptions == null ? void 0 : openaiOptions.user,
  instructions: openaiOptions == null ? void 0 : openaiOptions.instructions,
  service_tier: openaiOptions == null ? void 0 : openaiOptions.serviceTier,
- include: openaiOptionsInclude,
+ include,
  prompt_cache_key: openaiOptions == null ? void 0 : openaiOptions.promptCacheKey,
  safety_identifier: openaiOptions == null ? void 0 : openaiOptions.safetyIdentifier,
  top_logprobs: topLogprobs,
@@ -2733,6 +2726,7 @@ var OpenAIResponsesLanguageModel = class {
  strictJsonSchema
  });
  return {
+ webSearchToolName,
  args: {
  ...baseArgs,
  tools: openaiTools,
@@ -2743,7 +2737,11 @@ var OpenAIResponsesLanguageModel = class {
  }
  async doGenerate(options) {
  var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p, _q;
- const { args: body, warnings } = await this.getArgs(options);
+ const {
+ args: body,
+ warnings,
+ webSearchToolName
+ } = await this.getArgs(options);
  const url = this.config.url({
  path: "/responses",
  modelId: this.modelId
@@ -2794,12 +2792,18 @@ var OpenAIResponsesLanguageModel = class {
  start_index: import_v417.z.number().nullish(),
  end_index: import_v417.z.number().nullish(),
  quote: import_v417.z.string().nullish()
+ }),
+ import_v417.z.object({
+ type: import_v417.z.literal("container_file_citation")
  })
  ])
  )
  })
  )
  }),
+ import_v417.z.object({
+ type: import_v417.z.literal("code_interpreter_call")
+ }),
  import_v417.z.object({
  type: import_v417.z.literal("function_call"),
  call_id: import_v417.z.string(),
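
These additions make the response schema tolerant of two output shapes the previous version rejected: container_file_citation annotations and code_interpreter_call output items, both matched by their type field only. A minimal zod sketch of the pattern (the first variant is illustrative, not the provider's exact annotation schema; the package itself uses its bundled zod/v4 imports):

    import { z } from "zod";

    const annotationSchema = z.union([
      z.object({ type: z.literal("url_citation"), start_index: z.number().nullish() }), // illustrative
      z.object({ type: z.literal("container_file_citation") }),                         // newly accepted
    ]);

    // Extra keys are stripped by the non-strict object schema, so such
    // annotations now parse instead of failing validation:
    annotationSchema.parse({ type: "container_file_citation", file_id: "cf_123" });
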
@@ -2940,14 +2944,14 @@ var OpenAIResponsesLanguageModel = class {
  content.push({
  type: "tool-call",
  toolCallId: part.id,
- toolName: "web_search_preview",
+ toolName: webSearchToolName != null ? webSearchToolName : "web_search",
  input: JSON.stringify({ action: part.action }),
  providerExecuted: true
  });
  content.push({
  type: "tool-result",
  toolCallId: part.id,
- toolName: "web_search_preview",
+ toolName: webSearchToolName != null ? webSearchToolName : "web_search",
  result: { status: part.status },
  providerExecuted: true
  });
@@ -3032,7 +3036,11 @@ var OpenAIResponsesLanguageModel = class {
  };
  }
  async doStream(options) {
- const { args: body, warnings } = await this.getArgs(options);
+ const {
+ args: body,
+ warnings,
+ webSearchToolName
+ } = await this.getArgs(options);
  const { responseHeaders, value: response } = await (0, import_provider_utils15.postJsonToApi)({
  url: this.config.url({
  path: "/responses",
@@ -3093,13 +3101,13 @@ var OpenAIResponsesLanguageModel = class {
  });
  } else if (value.item.type === "web_search_call") {
  ongoingToolCalls[value.output_index] = {
- toolName: "web_search_preview",
+ toolName: webSearchToolName != null ? webSearchToolName : "web_search",
  toolCallId: value.item.id
  };
  controller.enqueue({
  type: "tool-input-start",
  id: value.item.id,
- toolName: "web_search_preview"
+ toolName: webSearchToolName != null ? webSearchToolName : "web_search"
  });
  } else if (value.item.type === "computer_call") {
  ongoingToolCalls[value.output_index] = {
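
The streaming path gets the same treatment as doGenerate. A brief hedged sketch with streamText, assuming the tool-input-start parts shown above are surfaced as-is on fullStream:

    import { streamText } from "ai";
    import { openai } from "@ai-sdk/openai";

    const result = streamText({
      model: openai.responses("gpt-5-mini"),
      prompt: "Find recent coverage of the Artemis program.",
      tools: { web_search: openai.tools.webSearch({}) },
    });

    for await (const part of result.fullStream) {
      if (part.type === "tool-input-start") {
        // Streams "web_search" (the configured name) rather than "web_search_preview".
        console.log(part.id, part.toolName);
      }
    }
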