@ai-sdk/openai 3.0.1 → 3.0.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
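
The net change in this diff: the Responses language model stops hard-coding the "openai" key (and the providerKey derived from this.config.provider.replace(".responses", "")) when reading provider options and emitting provider metadata. getArgs now computes a single providerOptionsName, "azure" when this.config.provider contains "azure" and "openai" otherwise, threads it into convertToOpenAIResponsesInput, and uses it throughout the hunks below. A minimal TypeScript sketch of that selection and its fallback, with illustrative helper names (the real code validates options through parseProviderOptions and a schema rather than reading the object directly):

// Illustrative sketch only; helper names are hypothetical.
type ProviderOptions = Record<string, Record<string, unknown> | undefined>;

function getProviderOptionsName(provider: string): "azure" | "openai" {
  // Mirrors: this.config.provider.includes("azure") ? "azure" : "openai"
  return provider.includes("azure") ? "azure" : "openai";
}

function readResponsesProviderOptions(
  provider: string,
  providerOptions: ProviderOptions | undefined
): Record<string, unknown> | undefined {
  const name = getProviderOptionsName(provider);
  // Prefer the provider-specific key; fall back to "openai" when an
  // Azure-flavored provider received no "azure" entry (mirrors getArgs).
  return providerOptions?.[name] ?? (name !== "openai" ? providerOptions?.openai : undefined);
}
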
@@ -2463,6 +2463,7 @@ async function convertToOpenAIResponsesInput({
  prompt,
  toolNameMapping,
  systemMessageMode,
+ providerOptionsName,
  fileIdPrefixes,
  store,
  hasLocalShellTool = false,
@@ -2518,7 +2519,7 @@ async function convertToOpenAIResponsesInput({
  ...part.data instanceof URL ? { image_url: part.data.toString() } : typeof part.data === "string" && isFileId(part.data, fileIdPrefixes) ? { file_id: part.data } : {
  image_url: `data:${mediaType};base64,${(0, import_provider_utils22.convertToBase64)(part.data)}`
  },
- detail: (_b2 = (_a2 = part.providerOptions) == null ? void 0 : _a2.openai) == null ? void 0 : _b2.imageDetail
+ detail: (_b2 = (_a2 = part.providerOptions) == null ? void 0 : _a2[providerOptionsName]) == null ? void 0 : _b2.imageDetail
  };
  } else if (part.mediaType === "application/pdf") {
  if (part.data instanceof URL) {
@@ -2550,7 +2551,7 @@ async function convertToOpenAIResponsesInput({
  for (const part of content) {
  switch (part.type) {
  case "text": {
- const id = (_b = (_a = part.providerOptions) == null ? void 0 : _a.openai) == null ? void 0 : _b.itemId;
+ const id = (_b = (_a = part.providerOptions) == null ? void 0 : _a[providerOptionsName]) == null ? void 0 : _b.itemId;
  if (store && id != null) {
  input.push({ type: "item_reference", id });
  break;
@@ -2563,7 +2564,7 @@ async function convertToOpenAIResponsesInput({
  break;
  }
  case "tool-call": {
- const id = (_g = (_d = (_c = part.providerOptions) == null ? void 0 : _c.openai) == null ? void 0 : _d.itemId) != null ? _g : (_f = (_e = part.providerMetadata) == null ? void 0 : _e.openai) == null ? void 0 : _f.itemId;
+ const id = (_g = (_d = (_c = part.providerOptions) == null ? void 0 : _c[providerOptionsName]) == null ? void 0 : _d.itemId) != null ? _g : (_f = (_e = part.providerMetadata) == null ? void 0 : _e[providerOptionsName]) == null ? void 0 : _f.itemId;
  if (part.providerExecuted) {
  if (store && id != null) {
  input.push({ type: "item_reference", id });
@@ -2630,7 +2631,7 @@ async function convertToOpenAIResponsesInput({
  break;
  }
  if (store) {
- const itemId = (_j = (_i = (_h = part.providerMetadata) == null ? void 0 : _h.openai) == null ? void 0 : _i.itemId) != null ? _j : part.toolCallId;
+ const itemId = (_j = (_i = (_h = part.providerMetadata) == null ? void 0 : _h[providerOptionsName]) == null ? void 0 : _i.itemId) != null ? _j : part.toolCallId;
  input.push({ type: "item_reference", id: itemId });
  } else {
  warnings.push({
@@ -2642,7 +2643,7 @@ async function convertToOpenAIResponsesInput({
  }
  case "reasoning": {
  const providerOptions = await (0, import_provider_utils22.parseProviderOptions)({
- provider: "openai",
+ provider: providerOptionsName,
  providerOptions: part.providerOptions,
  schema: openaiResponsesReasoningProviderOptionsSchema
  });
@@ -4307,11 +4308,19 @@ var OpenAIResponsesLanguageModel = class {
  if (stopSequences != null) {
  warnings.push({ type: "unsupported", feature: "stopSequences" });
  }
- const openaiOptions = await (0, import_provider_utils32.parseProviderOptions)({
- provider: "openai",
+ const providerOptionsName = this.config.provider.includes("azure") ? "azure" : "openai";
+ let openaiOptions = await (0, import_provider_utils32.parseProviderOptions)({
+ provider: providerOptionsName,
  providerOptions,
  schema: openaiResponsesProviderOptionsSchema
  });
+ if (openaiOptions == null && providerOptionsName !== "openai") {
+ openaiOptions = await (0, import_provider_utils32.parseProviderOptions)({
+ provider: "openai",
+ providerOptions,
+ schema: openaiResponsesProviderOptionsSchema
+ });
+ }
  const isReasoningModel = (_a = openaiOptions == null ? void 0 : openaiOptions.forceReasoning) != null ? _a : modelCapabilities.isReasoningModel;
  if ((openaiOptions == null ? void 0 : openaiOptions.conversation) && (openaiOptions == null ? void 0 : openaiOptions.previousResponseId)) {
  warnings.push({
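
The hunk above carries the core logic: options are parsed under the provider-specific key first, then re-parsed under "openai" when an Azure-flavored provider received no "azure" entry. For callers this means an Azure-based Responses model accepts its options under either key. A hedged usage sketch; the azure import and its responses() factory are assumptions about the companion Azure provider and are not part of this diff:

import { generateText } from "ai";
import { azure } from "@ai-sdk/azure"; // assumed companion provider, not shown in this diff

async function main() {
  const result = await generateText({
    model: azure.responses("my-deployment"), // hypothetical deployment name
    prompt: "Hello!",
    providerOptions: {
      // Preferred key for Azure-based providers after this change;
      // an "openai" entry is still honored when no "azure" entry exists.
      azure: { store: false },
    },
  });
  console.log(result.text);
}

main();
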
@@ -4338,6 +4347,7 @@ var OpenAIResponsesLanguageModel = class {
  prompt,
  toolNameMapping,
  systemMessageMode: (_b = openaiOptions == null ? void 0 : openaiOptions.systemMessageMode) != null ? _b : isReasoningModel ? "developer" : modelCapabilities.systemMessageMode,
+ providerOptionsName,
  fileIdPrefixes: this.config.fileIdPrefixes,
  store: (_c = openaiOptions == null ? void 0 : openaiOptions.store) != null ? _c : true,
  hasLocalShellTool: hasOpenAITool("openai.local_shell"),
@@ -4492,7 +4502,8 @@ var OpenAIResponsesLanguageModel = class {
  },
  warnings: [...warnings, ...toolWarnings],
  store,
- toolNameMapping
+ toolNameMapping,
+ providerOptionsName
  };
  }
  async doGenerate(options) {
@@ -4501,13 +4512,13 @@ var OpenAIResponsesLanguageModel = class {
  args: body,
  warnings,
  webSearchToolName,
- toolNameMapping
+ toolNameMapping,
+ providerOptionsName
  } = await this.getArgs(options);
  const url = this.config.url({
  path: "/responses",
  modelId: this.modelId
  });
- const providerKey = this.config.provider.replace(".responses", "");
  const approvalRequestIdToDummyToolCallIdFromPrompt = extractApprovalRequestIdToToolCallIdMapping(options.prompt);
  const {
  responseHeaders,
@@ -4549,7 +4560,7 @@ var OpenAIResponsesLanguageModel = class {
  type: "reasoning",
  text: summary.text,
  providerMetadata: {
- [providerKey]: {
+ [providerOptionsName]: {
  itemId: part.id,
  reasoningEncryptedContent: (_a = part.encrypted_content) != null ? _a : null
  }
@@ -4585,7 +4596,7 @@ var OpenAIResponsesLanguageModel = class {
  action: part.action
  }),
  providerMetadata: {
- [providerKey]: {
+ [providerOptionsName]: {
  itemId: part.id
  }
  }
@@ -4603,7 +4614,7 @@ var OpenAIResponsesLanguageModel = class {
  }
  }),
  providerMetadata: {
- [providerKey]: {
+ [providerOptionsName]: {
  itemId: part.id
  }
  }
@@ -4612,7 +4623,7 @@ var OpenAIResponsesLanguageModel = class {
  }
  case "message": {
  for (const contentPart of part.content) {
- if (((_c = (_b = options.providerOptions) == null ? void 0 : _b.openai) == null ? void 0 : _c.logprobs) && contentPart.logprobs) {
+ if (((_c = (_b = options.providerOptions) == null ? void 0 : _b[providerOptionsName]) == null ? void 0 : _c.logprobs) && contentPart.logprobs) {
  logprobs.push(contentPart.logprobs);
  }
  const providerMetadata2 = {
@@ -4625,7 +4636,7 @@ var OpenAIResponsesLanguageModel = class {
  type: "text",
  text: contentPart.text,
  providerMetadata: {
- [providerKey]: providerMetadata2
+ [providerOptionsName]: providerMetadata2
  }
  });
  for (const annotation of contentPart.annotations) {
@@ -4647,7 +4658,7 @@ var OpenAIResponsesLanguageModel = class {
  filename: (_l = annotation.filename) != null ? _l : annotation.file_id,
  ...annotation.file_id ? {
  providerMetadata: {
- [providerKey]: {
+ [providerOptionsName]: {
  fileId: annotation.file_id
  }
  }
@@ -4662,7 +4673,7 @@ var OpenAIResponsesLanguageModel = class {
  title: (_q = (_p = annotation.filename) != null ? _p : annotation.file_id) != null ? _q : "Document",
  filename: (_r = annotation.filename) != null ? _r : annotation.file_id,
  providerMetadata: {
- [providerKey]: {
+ [providerOptionsName]: {
  fileId: annotation.file_id,
  containerId: annotation.container_id,
  ...annotation.index != null ? { index: annotation.index } : {}
@@ -4678,7 +4689,7 @@ var OpenAIResponsesLanguageModel = class {
  title: annotation.file_id,
  filename: annotation.file_id,
  providerMetadata: {
- [providerKey]: {
+ [providerOptionsName]: {
  fileId: annotation.file_id,
  ...annotation.index != null ? { index: annotation.index } : {}
  }
@@ -4697,7 +4708,7 @@ var OpenAIResponsesLanguageModel = class {
  toolName: part.name,
  input: part.arguments,
  providerMetadata: {
- [providerKey]: {
+ [providerOptionsName]: {
  itemId: part.id
  }
  }
@@ -4748,7 +4759,7 @@ var OpenAIResponsesLanguageModel = class {
  ...part.error != null ? { error: part.error } : {}
  },
  providerMetadata: {
- [providerKey]: {
+ [providerOptionsName]: {
  itemId: part.id
  }
  }
@@ -4852,7 +4863,7 @@ var OpenAIResponsesLanguageModel = class {
  operation: part.operation
  }),
  providerMetadata: {
- [providerKey]: {
+ [providerOptionsName]: {
  itemId: part.id
  }
  }
@@ -4862,13 +4873,13 @@ var OpenAIResponsesLanguageModel = class {
  }
  }
  const providerMetadata = {
- [providerKey]: { responseId: response.id }
+ [providerOptionsName]: { responseId: response.id }
  };
  if (logprobs.length > 0) {
- providerMetadata[providerKey].logprobs = logprobs;
+ providerMetadata[providerOptionsName].logprobs = logprobs;
  }
  if (typeof response.service_tier === "string") {
- providerMetadata[providerKey].serviceTier = response.service_tier;
+ providerMetadata[providerOptionsName].serviceTier = response.service_tier;
  }
  const usage = response.usage;
  return {
@@ -4899,7 +4910,8 @@ var OpenAIResponsesLanguageModel = class {
  warnings,
  webSearchToolName,
  toolNameMapping,
- store
+ store,
+ providerOptionsName
  } = await this.getArgs(options);
  const { responseHeaders, value: response } = await (0, import_provider_utils32.postJsonToApi)({
  url: this.config.url({
@@ -4919,7 +4931,6 @@ var OpenAIResponsesLanguageModel = class {
  fetch: this.config.fetch
  });
  const self = this;
- const providerKey = this.config.provider.replace(".responses", "");
  const approvalRequestIdToDummyToolCallIdFromPrompt = extractApprovalRequestIdToToolCallIdMapping(options.prompt);
  const approvalRequestIdToDummyToolCallIdFromStream = /* @__PURE__ */ new Map();
  let finishReason = {
@@ -5085,7 +5096,7 @@ var OpenAIResponsesLanguageModel = class {
  type: "text-start",
  id: value.item.id,
  providerMetadata: {
- [providerKey]: {
+ [providerOptionsName]: {
  itemId: value.item.id
  }
  }
@@ -5099,7 +5110,7 @@ var OpenAIResponsesLanguageModel = class {
  type: "reasoning-start",
  id: `${value.item.id}:0`,
  providerMetadata: {
- [providerKey]: {
+ [providerOptionsName]: {
  itemId: value.item.id,
  reasoningEncryptedContent: (_a = value.item.encrypted_content) != null ? _a : null
  }
@@ -5112,7 +5123,7 @@ var OpenAIResponsesLanguageModel = class {
  type: "text-end",
  id: value.item.id,
  providerMetadata: {
- [providerKey]: {
+ [providerOptionsName]: {
  itemId: value.item.id,
  ...ongoingAnnotations.length > 0 && {
  annotations: ongoingAnnotations
@@ -5133,7 +5144,7 @@ var OpenAIResponsesLanguageModel = class {
  toolName: value.item.name,
  input: value.item.arguments,
  providerMetadata: {
- [providerKey]: {
+ [providerOptionsName]: {
  itemId: value.item.id
  }
  }
@@ -5234,7 +5245,7 @@ var OpenAIResponsesLanguageModel = class {
  ...value.item.error != null ? { error: value.item.error } : {}
  },
  providerMetadata: {
- [providerKey]: {
+ [providerOptionsName]: {
  itemId: value.item.id
  }
  }
@@ -5272,7 +5283,7 @@ var OpenAIResponsesLanguageModel = class {
  operation: value.item.operation
  }),
  providerMetadata: {
- [providerKey]: {
+ [providerOptionsName]: {
  itemId: value.item.id
  }
  }
@@ -5318,7 +5329,7 @@ var OpenAIResponsesLanguageModel = class {
  }
  }),
  providerMetadata: {
- [providerKey]: { itemId: value.item.id }
+ [providerOptionsName]: { itemId: value.item.id }
  }
  });
  } else if (value.item.type === "shell_call") {
@@ -5333,7 +5344,7 @@ var OpenAIResponsesLanguageModel = class {
  }
  }),
  providerMetadata: {
- [providerKey]: { itemId: value.item.id }
+ [providerOptionsName]: { itemId: value.item.id }
  }
  });
  } else if (value.item.type === "reasoning") {
@@ -5348,7 +5359,7 @@ var OpenAIResponsesLanguageModel = class {
  type: "reasoning-end",
  id: `${value.item.id}:${summaryIndex}`,
  providerMetadata: {
- [providerKey]: {
+ [providerOptionsName]: {
  itemId: value.item.id,
  reasoningEncryptedContent: (_k = value.item.encrypted_content) != null ? _k : null
  }
@@ -5454,7 +5465,7 @@ var OpenAIResponsesLanguageModel = class {
  id: value.item_id,
  delta: value.delta
  });
- if (((_m = (_l = options.providerOptions) == null ? void 0 : _l.openai) == null ? void 0 : _m.logprobs) && value.logprobs) {
+ if (((_m = (_l = options.providerOptions) == null ? void 0 : _l[providerOptionsName]) == null ? void 0 : _m.logprobs) && value.logprobs) {
  logprobs.push(value.logprobs);
  }
  } else if (value.type === "response.reasoning_summary_part.added") {
@@ -5469,7 +5480,7 @@ var OpenAIResponsesLanguageModel = class {
  type: "reasoning-end",
  id: `${value.item_id}:${summaryIndex}`,
  providerMetadata: {
- [providerKey]: { itemId: value.item_id }
+ [providerOptionsName]: { itemId: value.item_id }
  }
  });
  activeReasoningPart.summaryParts[summaryIndex] = "concluded";
@@ -5479,7 +5490,7 @@ var OpenAIResponsesLanguageModel = class {
  type: "reasoning-start",
  id: `${value.item_id}:${value.summary_index}`,
  providerMetadata: {
- [providerKey]: {
+ [providerOptionsName]: {
  itemId: value.item_id,
  reasoningEncryptedContent: (_o = (_n = activeReasoning[value.item_id]) == null ? void 0 : _n.encryptedContent) != null ? _o : null
  }
@@ -5492,7 +5503,7 @@ var OpenAIResponsesLanguageModel = class {
  id: `${value.item_id}:${value.summary_index}`,
  delta: value.delta,
  providerMetadata: {
- [providerKey]: {
+ [providerOptionsName]: {
  itemId: value.item_id
  }
  }
@@ -5503,7 +5514,7 @@ var OpenAIResponsesLanguageModel = class {
  type: "reasoning-end",
  id: `${value.item_id}:${value.summary_index}`,
  providerMetadata: {
- [providerKey]: { itemId: value.item_id }
+ [providerOptionsName]: { itemId: value.item_id }
  }
  });
  activeReasoning[value.item_id].summaryParts[value.summary_index] = "concluded";
@@ -5542,7 +5553,7 @@ var OpenAIResponsesLanguageModel = class {
  filename: (_A = value.annotation.filename) != null ? _A : value.annotation.file_id,
  ...value.annotation.file_id ? {
  providerMetadata: {
- [providerKey]: {
+ [providerOptionsName]: {
  fileId: value.annotation.file_id
  }
  }
@@ -5557,7 +5568,7 @@ var OpenAIResponsesLanguageModel = class {
  title: (_F = (_E = value.annotation.filename) != null ? _E : value.annotation.file_id) != null ? _F : "Document",
  filename: (_G = value.annotation.filename) != null ? _G : value.annotation.file_id,
  providerMetadata: {
- [providerKey]: {
+ [providerOptionsName]: {
  fileId: value.annotation.file_id,
  containerId: value.annotation.container_id,
  ...value.annotation.index != null ? { index: value.annotation.index } : {}
@@ -5573,7 +5584,7 @@ var OpenAIResponsesLanguageModel = class {
  title: value.annotation.file_id,
  filename: value.annotation.file_id,
  providerMetadata: {
- [providerKey]: {
+ [providerOptionsName]: {
  fileId: value.annotation.file_id,
  ...value.annotation.index != null ? { index: value.annotation.index } : {}
  }
@@ -5586,15 +5597,15 @@ var OpenAIResponsesLanguageModel = class {
  },
  flush(controller) {
  const providerMetadata = {
- [providerKey]: {
+ [providerOptionsName]: {
  responseId
  }
  };
  if (logprobs.length > 0) {
- providerMetadata[providerKey].logprobs = logprobs;
+ providerMetadata[providerOptionsName].logprobs = logprobs;
  }
  if (serviceTier !== void 0) {
- providerMetadata[providerKey].serviceTier = serviceTier;
+ providerMetadata[providerOptionsName].serviceTier = serviceTier;
  }
  controller.enqueue({
  type: "finish",
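
On the output side, the hunks above key every emitted providerMetadata object (itemId, responseId, logprobs, serviceTier) by the same providerOptionsName instead of the old providerKey. A consumer that wants to read this metadata regardless of provider flavor can check both keys; a minimal sketch with a simplified local type, not the SDK's exported one:

// Minimal local shape covering the fields written in the hunks above.
type ResponsesMetadata = {
  responseId?: string;
  itemId?: string;
  serviceTier?: string;
  logprobs?: unknown[];
};

// Reads the Responses metadata whether the model keyed it under "azure" or "openai".
function readResponsesMetadata(
  providerMetadata: Record<string, ResponsesMetadata> | undefined
): ResponsesMetadata | undefined {
  return providerMetadata?.azure ?? providerMetadata?.openai;
}
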