@ai-sdk/openai 3.0.0-beta.27 → 3.0.0-beta.29

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -2375,6 +2375,9 @@ async function convertToOpenAIResponsesInput({
  input.push(reasoningMessages[reasoningId]);
  } else {
  reasoningMessage.summary.push(...summaryParts);
+ if ((providerOptions == null ? void 0 : providerOptions.reasoningEncryptedContent) != null) {
+ reasoningMessage.encrypted_content = providerOptions.reasoningEncryptedContent;
+ }
  }
  }
  } else {
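
In practical terms, the added branch forwards previously returned encrypted reasoning content back to the Responses API when a reasoning message is rebuilt from the prompt. A minimal sketch of the intent, assuming a reasoning part that carries the openai provider options emitted by the streaming code later in this diff (names and values are illustrative, not taken from the package):

// Hypothetical reasoning part in the prompt, with openai provider options:
const reasoningPart = {
  type: "reasoning",
  text: "compared both approaches",
  providerOptions: {
    openai: {
      itemId: "rs_123",
      reasoningEncryptedContent: "gAAAA..." // opaque payload from a prior response
    }
  }
};
// With the change above, the reconstructed Responses API reasoning item would
// roughly look like:
// { type: "reasoning", id: "rs_123",
//   summary: [{ type: "summary_text", text: "compared both approaches" }],
//   encrypted_content: "gAAAA..." }
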
@@ -2544,11 +2547,7 @@ var openaiResponsesChunkSchema = lazySchema12(
  z14.object({
  type: z14.literal("web_search_call"),
  id: z14.string(),
- status: z14.string(),
- action: z14.object({
- type: z14.literal("search"),
- query: z14.string().optional()
- }).nullish()
+ status: z14.string()
  }),
  z14.object({
  type: z14.literal("computer_call"),
@@ -2634,7 +2633,7 @@ var openaiResponsesChunkSchema = lazySchema12(
  url: z14.string(),
  pattern: z14.string()
  })
- ]).nullish()
+ ])
  }),
  z14.object({
  type: z14.literal("file_search_call"),
@@ -2724,6 +2723,11 @@ var openaiResponsesChunkSchema = lazySchema12(
  summary_index: z14.number(),
  delta: z14.string()
  }),
+ z14.object({
+ type: z14.literal("response.reasoning_summary_part.done"),
+ item_id: z14.string(),
+ summary_index: z14.number()
+ }),
  z14.object({
  type: z14.literal("error"),
  code: z14.string(),
@@ -2815,7 +2819,7 @@ var openaiResponsesResponseSchema = lazySchema12(
  url: z14.string(),
  pattern: z14.string()
  })
- ]).nullish()
+ ])
  }),
  z14.object({
  type: z14.literal("file_search_call"),
@@ -3116,7 +3120,7 @@ var fileSearch = createProviderDefinedToolFactoryWithOutputSchema3({

  // src/tool/web-search.ts
  import {
- createProviderDefinedToolFactory,
+ createProviderDefinedToolFactoryWithOutputSchema as createProviderDefinedToolFactoryWithOutputSchema4,
  lazySchema as lazySchema16,
  zodSchema as zodSchema16
  } from "@ai-sdk/provider-utils";
@@ -3124,9 +3128,7 @@ import { z as z18 } from "zod/v4";
  var webSearchArgsSchema = lazySchema16(
  () => zodSchema16(
  z18.object({
- filters: z18.object({
- allowedDomains: z18.array(z18.string()).optional()
- }).optional(),
+ filters: z18.object({ allowedDomains: z18.array(z18.string()).optional() }).optional(),
  searchContextSize: z18.enum(["low", "medium", "high"]).optional(),
  userLocation: z18.object({
  type: z18.literal("approximate"),
@@ -3138,16 +3140,17 @@ var webSearchArgsSchema = lazySchema16(
  })
  )
  );
- var webSearchInputSchema = lazySchema16(
+ var webSearchInputSchema = lazySchema16(() => zodSchema16(z18.object({})));
+ var webSearchOutputSchema = lazySchema16(
  () => zodSchema16(
  z18.object({
  action: z18.discriminatedUnion("type", [
  z18.object({
  type: z18.literal("search"),
- query: z18.string().nullish()
+ query: z18.string().optional()
  }),
  z18.object({
- type: z18.literal("open_page"),
+ type: z18.literal("openPage"),
  url: z18.string()
  }),
  z18.object({
@@ -3155,19 +3158,20 @@ var webSearchInputSchema = lazySchema16(
  url: z18.string(),
  pattern: z18.string()
  })
- ]).nullish()
+ ])
  })
  )
  );
- var webSearchToolFactory = createProviderDefinedToolFactory({
+ var webSearchToolFactory = createProviderDefinedToolFactoryWithOutputSchema4({
  id: "openai.web_search",
  name: "web_search",
- inputSchema: webSearchInputSchema
+ inputSchema: webSearchInputSchema,
+ outputSchema: webSearchOutputSchema
  });

  // src/tool/web-search-preview.ts
  import {
- createProviderDefinedToolFactory as createProviderDefinedToolFactory2,
+ createProviderDefinedToolFactoryWithOutputSchema as createProviderDefinedToolFactoryWithOutputSchema5,
  lazySchema as lazySchema17,
  zodSchema as zodSchema17
  } from "@ai-sdk/provider-utils";
@@ -3175,51 +3179,30 @@ import { z as z19 } from "zod/v4";
  var webSearchPreviewArgsSchema = lazySchema17(
  () => zodSchema17(
  z19.object({
- /**
- * Search context size to use for the web search.
- * - high: Most comprehensive context, highest cost, slower response
- * - medium: Balanced context, cost, and latency (default)
- * - low: Least context, lowest cost, fastest response
- */
  searchContextSize: z19.enum(["low", "medium", "high"]).optional(),
- /**
- * User location information to provide geographically relevant search results.
- */
  userLocation: z19.object({
- /**
- * Type of location (always 'approximate')
- */
  type: z19.literal("approximate"),
- /**
- * Two-letter ISO country code (e.g., 'US', 'GB')
- */
  country: z19.string().optional(),
- /**
- * City name (free text, e.g., 'Minneapolis')
- */
  city: z19.string().optional(),
- /**
- * Region name (free text, e.g., 'Minnesota')
- */
  region: z19.string().optional(),
- /**
- * IANA timezone (e.g., 'America/Chicago')
- */
  timezone: z19.string().optional()
  }).optional()
  })
  )
  );
  var webSearchPreviewInputSchema = lazySchema17(
+ () => zodSchema17(z19.object({}))
+ );
+ var webSearchPreviewOutputSchema = lazySchema17(
  () => zodSchema17(
  z19.object({
  action: z19.discriminatedUnion("type", [
  z19.object({
  type: z19.literal("search"),
- query: z19.string().nullish()
+ query: z19.string().optional()
  }),
  z19.object({
- type: z19.literal("open_page"),
+ type: z19.literal("openPage"),
  url: z19.string()
  }),
  z19.object({
@@ -3227,19 +3210,20 @@ var webSearchPreviewInputSchema = lazySchema17(
  url: z19.string(),
  pattern: z19.string()
  })
- ]).nullish()
+ ])
  })
  )
  );
- var webSearchPreview = createProviderDefinedToolFactory2({
+ var webSearchPreview = createProviderDefinedToolFactoryWithOutputSchema5({
  id: "openai.web_search_preview",
  name: "web_search_preview",
- inputSchema: webSearchPreviewInputSchema
+ inputSchema: webSearchPreviewInputSchema,
+ outputSchema: webSearchPreviewOutputSchema
  });

  // src/tool/image-generation.ts
  import {
- createProviderDefinedToolFactoryWithOutputSchema as createProviderDefinedToolFactoryWithOutputSchema4,
+ createProviderDefinedToolFactoryWithOutputSchema as createProviderDefinedToolFactoryWithOutputSchema6,
  lazySchema as lazySchema18,
  zodSchema as zodSchema18
  } from "@ai-sdk/provider-utils";
@@ -3267,7 +3251,7 @@ var imageGenerationInputSchema = lazySchema18(() => zodSchema18(z20.object({})))
  var imageGenerationOutputSchema = lazySchema18(
  () => zodSchema18(z20.object({ result: z20.string() }))
  );
- var imageGenerationToolFactory = createProviderDefinedToolFactoryWithOutputSchema4({
+ var imageGenerationToolFactory = createProviderDefinedToolFactoryWithOutputSchema6({
  id: "openai.image_generation",
  name: "image_generation",
  inputSchema: imageGenerationInputSchema,
@@ -3629,7 +3613,8 @@ var OpenAIResponsesLanguageModel = class {
  tools: openaiTools,
  tool_choice: openaiToolChoice
  },
- warnings: [...warnings, ...toolWarnings]
+ warnings: [...warnings, ...toolWarnings],
+ store
  };
  }
  async doGenerate(options) {
@@ -3784,14 +3769,14 @@ var OpenAIResponsesLanguageModel = class {
  type: "tool-call",
  toolCallId: part.id,
  toolName: webSearchToolName != null ? webSearchToolName : "web_search",
- input: JSON.stringify({ action: part.action }),
+ input: JSON.stringify({}),
  providerExecuted: true
  });
  content.push({
  type: "tool-result",
  toolCallId: part.id,
  toolName: webSearchToolName != null ? webSearchToolName : "web_search",
- result: { status: part.status },
+ result: mapWebSearchOutput(part.action),
  providerExecuted: true
  });
  break;
@@ -3904,7 +3889,8 @@ var OpenAIResponsesLanguageModel = class {
  const {
  args: body,
  warnings,
- webSearchToolName
+ webSearchToolName,
+ store
  } = await this.getArgs(options);
  const { responseHeaders, value: response } = await postJsonToApi6({
  url: this.config.url({
@@ -3943,7 +3929,7 @@ var OpenAIResponsesLanguageModel = class {
  controller.enqueue({ type: "stream-start", warnings });
  },
  transform(chunk, controller) {
- var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p, _q, _r, _s, _t, _u, _v, _w;
+ var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p, _q, _r, _s, _t, _u, _v;
  if (options.includeRawChunks) {
  controller.enqueue({ type: "raw", rawValue: chunk.rawValue });
  }
@@ -3975,6 +3961,17 @@ var OpenAIResponsesLanguageModel = class {
  toolName: webSearchToolName != null ? webSearchToolName : "web_search",
  providerExecuted: true
  });
+ controller.enqueue({
+ type: "tool-input-end",
+ id: value.item.id
+ });
+ controller.enqueue({
+ type: "tool-call",
+ toolCallId: value.item.id,
+ toolName: "web_search",
+ input: JSON.stringify({}),
+ providerExecuted: true
+ });
  } else if (value.item.type === "computer_call") {
  ongoingToolCalls[value.output_index] = {
  toolName: "computer_use",
@@ -4031,10 +4028,10 @@ var OpenAIResponsesLanguageModel = class {
  }
  }
  });
- } else if (isResponseOutputItemAddedReasoningChunk(value)) {
+ } else if (isResponseOutputItemAddedChunk(value) && value.item.type === "reasoning") {
  activeReasoning[value.item.id] = {
  encryptedContent: value.item.encrypted_content,
- summaryParts: [0]
+ summaryParts: { 0: "active" }
  };
  controller.enqueue({
  type: "reasoning-start",
@@ -4068,22 +4065,11 @@ var OpenAIResponsesLanguageModel = class {
  });
  } else if (value.item.type === "web_search_call") {
  ongoingToolCalls[value.output_index] = void 0;
- controller.enqueue({
- type: "tool-input-end",
- id: value.item.id
- });
- controller.enqueue({
- type: "tool-call",
- toolCallId: value.item.id,
- toolName: "web_search",
- input: JSON.stringify({ action: value.item.action }),
- providerExecuted: true
- });
  controller.enqueue({
  type: "tool-result",
  toolCallId: value.item.id,
  toolName: "web_search",
- result: { status: value.item.status },
+ result: mapWebSearchOutput(value.item.action),
  providerExecuted: true
  });
  } else if (value.item.type === "computer_call") {
@@ -4173,9 +4159,14 @@ var OpenAIResponsesLanguageModel = class {
  type: "text-end",
  id: value.item.id
  });
- } else if (isResponseOutputItemDoneReasoningChunk(value)) {
+ } else if (value.item.type === "reasoning") {
  const activeReasoningPart = activeReasoning[value.item.id];
- for (const summaryIndex of activeReasoningPart.summaryParts) {
+ const summaryPartIndices = Object.entries(
+ activeReasoningPart.summaryParts
+ ).filter(
+ ([_, status]) => status === "active" || status === "can-conclude"
+ ).map(([summaryIndex]) => summaryIndex);
+ for (const summaryIndex of summaryPartIndices) {
  controller.enqueue({
  type: "reasoning-end",
  id: `${value.item.id}:${summaryIndex}`,
@@ -4260,23 +4251,34 @@ var OpenAIResponsesLanguageModel = class {
  if (((_f = (_e = options.providerOptions) == null ? void 0 : _e.openai) == null ? void 0 : _f.logprobs) && value.logprobs) {
  logprobs.push(value.logprobs);
  }
- } else if (isResponseReasoningSummaryPartAddedChunk(value)) {
+ } else if (value.type === "response.reasoning_summary_part.added") {
  if (value.summary_index > 0) {
- (_g = activeReasoning[value.item_id]) == null ? void 0 : _g.summaryParts.push(
- value.summary_index
- );
+ const activeReasoningPart = activeReasoning[value.item_id];
+ activeReasoningPart.summaryParts[value.summary_index] = "active";
+ for (const summaryIndex of Object.keys(
+ activeReasoningPart.summaryParts
+ )) {
+ if (activeReasoningPart.summaryParts[summaryIndex] === "can-conclude") {
+ controller.enqueue({
+ type: "reasoning-end",
+ id: `${value.item_id}:${summaryIndex}`,
+ providerMetadata: { openai: { itemId: value.item_id } }
+ });
+ activeReasoningPart.summaryParts[summaryIndex] = "concluded";
+ }
+ }
  controller.enqueue({
  type: "reasoning-start",
  id: `${value.item_id}:${value.summary_index}`,
  providerMetadata: {
  openai: {
  itemId: value.item_id,
- reasoningEncryptedContent: (_i = (_h = activeReasoning[value.item_id]) == null ? void 0 : _h.encryptedContent) != null ? _i : null
+ reasoningEncryptedContent: (_h = (_g = activeReasoning[value.item_id]) == null ? void 0 : _g.encryptedContent) != null ? _h : null
  }
  }
  });
  }
- } else if (isResponseReasoningSummaryTextDeltaChunk(value)) {
+ } else if (value.type === "response.reasoning_summary_text.delta") {
  controller.enqueue({
  type: "reasoning-delta",
  id: `${value.item_id}:${value.summary_index}`,
@@ -4287,16 +4289,29 @@ var OpenAIResponsesLanguageModel = class {
  }
  }
  });
+ } else if (value.type === "response.reasoning_summary_part.done") {
+ if (store) {
+ controller.enqueue({
+ type: "reasoning-end",
+ id: `${value.item_id}:${value.summary_index}`,
+ providerMetadata: {
+ openai: { itemId: value.item_id }
+ }
+ });
+ activeReasoning[value.item_id].summaryParts[value.summary_index] = "concluded";
+ } else {
+ activeReasoning[value.item_id].summaryParts[value.summary_index] = "can-conclude";
+ }
  } else if (isResponseFinishedChunk(value)) {
  finishReason = mapOpenAIResponseFinishReason({
- finishReason: (_j = value.response.incomplete_details) == null ? void 0 : _j.reason,
+ finishReason: (_i = value.response.incomplete_details) == null ? void 0 : _i.reason,
  hasFunctionCall
  });
  usage.inputTokens = value.response.usage.input_tokens;
  usage.outputTokens = value.response.usage.output_tokens;
  usage.totalTokens = value.response.usage.input_tokens + value.response.usage.output_tokens;
- usage.reasoningTokens = (_l = (_k = value.response.usage.output_tokens_details) == null ? void 0 : _k.reasoning_tokens) != null ? _l : void 0;
- usage.cachedInputTokens = (_n = (_m = value.response.usage.input_tokens_details) == null ? void 0 : _m.cached_tokens) != null ? _n : void 0;
+ usage.reasoningTokens = (_k = (_j = value.response.usage.output_tokens_details) == null ? void 0 : _j.reasoning_tokens) != null ? _k : void 0;
+ usage.cachedInputTokens = (_m = (_l = value.response.usage.input_tokens_details) == null ? void 0 : _l.cached_tokens) != null ? _m : void 0;
  if (typeof value.response.service_tier === "string") {
  serviceTier = value.response.service_tier;
  }
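
Read together with the output_item.done and summary_part.added handlers above, summaryParts now acts as a small per-item state map rather than a plain index list. A hedged reading of the lifecycle this diff implies:

// "active"       - reasoning-start was emitted for `${item_id}:${summary_index}`
// "can-conclude" - summary_part.done arrived while store is false; reasoning-end
//                  is deferred until the next summary part starts or the
//                  reasoning output item completes
// "concluded"    - reasoning-end has been emitted for that summary part
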
@@ -4305,7 +4320,7 @@ var OpenAIResponsesLanguageModel = class {
  controller.enqueue({
  type: "source",
  sourceType: "url",
- id: (_q = (_p = (_o = self.config).generateId) == null ? void 0 : _p.call(_o)) != null ? _q : generateId2(),
+ id: (_p = (_o = (_n = self.config).generateId) == null ? void 0 : _o.call(_n)) != null ? _p : generateId2(),
  url: value.annotation.url,
  title: value.annotation.title
  });
@@ -4313,10 +4328,10 @@ var OpenAIResponsesLanguageModel = class {
  controller.enqueue({
  type: "source",
  sourceType: "document",
- id: (_t = (_s = (_r = self.config).generateId) == null ? void 0 : _s.call(_r)) != null ? _t : generateId2(),
+ id: (_s = (_r = (_q = self.config).generateId) == null ? void 0 : _r.call(_q)) != null ? _s : generateId2(),
  mediaType: "text/plain",
- title: (_v = (_u = value.annotation.quote) != null ? _u : value.annotation.filename) != null ? _v : "Document",
- filename: (_w = value.annotation.filename) != null ? _w : value.annotation.file_id
+ title: (_u = (_t = value.annotation.quote) != null ? _t : value.annotation.filename) != null ? _u : "Document",
+ filename: (_v = value.annotation.filename) != null ? _v : value.annotation.file_id
  });
  }
  } else if (isErrorChunk(value)) {
@@ -4355,9 +4370,6 @@ function isTextDeltaChunk(chunk) {
  function isResponseOutputItemDoneChunk(chunk) {
  return chunk.type === "response.output_item.done";
  }
- function isResponseOutputItemDoneReasoningChunk(chunk) {
- return isResponseOutputItemDoneChunk(chunk) && chunk.item.type === "reasoning";
- }
  function isResponseFinishedChunk(chunk) {
  return chunk.type === "response.completed" || chunk.type === "response.incomplete";
  }
@@ -4379,18 +4391,9 @@ function isResponseCodeInterpreterCallCodeDoneChunk(chunk) {
  function isResponseOutputItemAddedChunk(chunk) {
  return chunk.type === "response.output_item.added";
  }
- function isResponseOutputItemAddedReasoningChunk(chunk) {
- return isResponseOutputItemAddedChunk(chunk) && chunk.item.type === "reasoning";
- }
  function isResponseAnnotationAddedChunk(chunk) {
  return chunk.type === "response.output_text.annotation.added";
  }
- function isResponseReasoningSummaryPartAddedChunk(chunk) {
- return chunk.type === "response.reasoning_summary_part.added";
- }
- function isResponseReasoningSummaryTextDeltaChunk(chunk) {
- return chunk.type === "response.reasoning_summary_text.delta";
- }
  function isErrorChunk(chunk) {
  return chunk.type === "error";
  }
@@ -4428,6 +4431,19 @@ function getResponsesModelConfig(modelId) {
  isReasoningModel: false
  };
  }
+ function mapWebSearchOutput(action) {
+ var _a;
+ switch (action.type) {
+ case "search":
+ return { action: { type: "search", query: (_a = action.query) != null ? _a : void 0 } };
+ case "open_page":
+ return { action: { type: "openPage", url: action.url } };
+ case "find":
+ return {
+ action: { type: "find", url: action.url, pattern: action.pattern }
+ };
+ }
+ }
  export {
  OpenAIChatLanguageModel,
  OpenAICompletionLanguageModel,
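
As a usage-level illustration (not part of the diff), the new mapWebSearchOutput helper renames the raw Responses API action into the camelCase shape expected by the updated web search output schemas:

// mapWebSearchOutput({ type: "open_page", url: "https://example.com" })
//   -> { action: { type: "openPage", url: "https://example.com" } }
// mapWebSearchOutput({ type: "search", query: null })
//   -> { action: { type: "search", query: undefined } }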