@ai-sdk/openai 3.0.1 → 3.0.3

This diff shows the published contents of two publicly available package versions as they appear in their public registry. It is provided for informational purposes only and reflects the changes between those versions.
package/CHANGELOG.md CHANGED
@@ -1,5 +1,20 @@
  # @ai-sdk/openai

+ ## 3.0.3
+
+ ### Patch Changes
+
+ - 55cd1a4: fix(azure): allow 'azure' as a key for providerOptions
+
+ ## 3.0.2
+
+ ### Patch Changes
+
+ - 863d34f: fix: trigger release to update `@latest`
+ - Updated dependencies [863d34f]
+   - @ai-sdk/provider@3.0.1
+   - @ai-sdk/provider-utils@4.0.2
+
  ## 3.0.1

  ### Patch Changes
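The 3.0.3 entry is the substantive change in this release: request-level provider options for Azure-hosted Responses models are now also read from the `azure` key of `providerOptions`, with the existing `openai` key kept as a fallback. A minimal usage sketch, assuming `generateText` from the `ai` package and `createAzure` from `@ai-sdk/azure`; the resource name, deployment id, and the `store` option are illustrative:

```ts
import { generateText } from "ai";
import { createAzure } from "@ai-sdk/azure";

// Hypothetical Azure OpenAI resource and deployment.
const azure = createAzure({
  resourceName: "my-resource",
  apiKey: process.env.AZURE_API_KEY,
});

async function main() {
  const result = await generateText({
    model: azure("my-deployment"),
    prompt: "Summarize the release notes.",
    // Before 3.0.3 these options were only picked up under the `openai` key;
    // the `azure` key is now accepted as well (see the dist/index.js hunks below).
    providerOptions: {
      azure: { store: false },
    },
  });
  console.log(result.text);
}

main().catch(console.error);
```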
package/dist/index.js CHANGED
@@ -2472,6 +2472,7 @@ async function convertToOpenAIResponsesInput({
  prompt,
  toolNameMapping,
  systemMessageMode,
+ providerOptionsName,
  fileIdPrefixes,
  store,
  hasLocalShellTool = false,
@@ -2527,7 +2528,7 @@ async function convertToOpenAIResponsesInput({
  ...part.data instanceof URL ? { image_url: part.data.toString() } : typeof part.data === "string" && isFileId(part.data, fileIdPrefixes) ? { file_id: part.data } : {
  image_url: `data:${mediaType};base64,${(0, import_provider_utils23.convertToBase64)(part.data)}`
  },
- detail: (_b2 = (_a2 = part.providerOptions) == null ? void 0 : _a2.openai) == null ? void 0 : _b2.imageDetail
+ detail: (_b2 = (_a2 = part.providerOptions) == null ? void 0 : _a2[providerOptionsName]) == null ? void 0 : _b2.imageDetail
  };
  } else if (part.mediaType === "application/pdf") {
  if (part.data instanceof URL) {
@@ -2559,7 +2560,7 @@ async function convertToOpenAIResponsesInput({
  for (const part of content) {
  switch (part.type) {
  case "text": {
- const id = (_b = (_a = part.providerOptions) == null ? void 0 : _a.openai) == null ? void 0 : _b.itemId;
+ const id = (_b = (_a = part.providerOptions) == null ? void 0 : _a[providerOptionsName]) == null ? void 0 : _b.itemId;
  if (store && id != null) {
  input.push({ type: "item_reference", id });
  break;
@@ -2572,7 +2573,7 @@ async function convertToOpenAIResponsesInput({
  break;
  }
  case "tool-call": {
- const id = (_g = (_d = (_c = part.providerOptions) == null ? void 0 : _c.openai) == null ? void 0 : _d.itemId) != null ? _g : (_f = (_e = part.providerMetadata) == null ? void 0 : _e.openai) == null ? void 0 : _f.itemId;
+ const id = (_g = (_d = (_c = part.providerOptions) == null ? void 0 : _c[providerOptionsName]) == null ? void 0 : _d.itemId) != null ? _g : (_f = (_e = part.providerMetadata) == null ? void 0 : _e[providerOptionsName]) == null ? void 0 : _f.itemId;
  if (part.providerExecuted) {
  if (store && id != null) {
  input.push({ type: "item_reference", id });
@@ -2639,7 +2640,7 @@ async function convertToOpenAIResponsesInput({
  break;
  }
  if (store) {
- const itemId = (_j = (_i = (_h = part.providerMetadata) == null ? void 0 : _h.openai) == null ? void 0 : _i.itemId) != null ? _j : part.toolCallId;
+ const itemId = (_j = (_i = (_h = part.providerMetadata) == null ? void 0 : _h[providerOptionsName]) == null ? void 0 : _i.itemId) != null ? _j : part.toolCallId;
  input.push({ type: "item_reference", id: itemId });
  } else {
  warnings.push({
@@ -2651,7 +2652,7 @@ async function convertToOpenAIResponsesInput({
  }
  case "reasoning": {
  const providerOptions = await (0, import_provider_utils23.parseProviderOptions)({
- provider: "openai",
+ provider: providerOptionsName,
  providerOptions: part.providerOptions,
  schema: openaiResponsesReasoningProviderOptionsSchema
  });
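All of the hunks above apply the same substitution inside `convertToOpenAIResponsesInput`: part-level provider options and metadata that were previously read from the hard-coded `openai` key are now read from the dynamic `providerOptionsName` key. The lookup pattern, isolated as a small sketch (the `ImagePart` type is an illustrative stand-in, not the package's own type):

```ts
// Sketch of the dynamic provider-options lookup used in the hunks above.
// providerOptionsName is "azure" for Azure-based providers and "openai" otherwise.
type ProviderOptions = Record<string, Record<string, unknown> | undefined>;

interface ImagePart {
  providerOptions?: ProviderOptions;
}

function resolveImageDetail(
  part: ImagePart,
  providerOptionsName: "openai" | "azure",
): unknown {
  // Equivalent to the compiled `_a2[providerOptionsName] ... .imageDetail` chain.
  return part.providerOptions?.[providerOptionsName]?.imageDetail;
}
```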
@@ -4024,11 +4025,19 @@ var OpenAIResponsesLanguageModel = class {
  if (stopSequences != null) {
  warnings.push({ type: "unsupported", feature: "stopSequences" });
  }
- const openaiOptions = await (0, import_provider_utils27.parseProviderOptions)({
- provider: "openai",
+ const providerOptionsName = this.config.provider.includes("azure") ? "azure" : "openai";
+ let openaiOptions = await (0, import_provider_utils27.parseProviderOptions)({
+ provider: providerOptionsName,
  providerOptions,
  schema: openaiResponsesProviderOptionsSchema
  });
+ if (openaiOptions == null && providerOptionsName !== "openai") {
+ openaiOptions = await (0, import_provider_utils27.parseProviderOptions)({
+ provider: "openai",
+ providerOptions,
+ schema: openaiResponsesProviderOptionsSchema
+ });
+ }
  const isReasoningModel = (_a = openaiOptions == null ? void 0 : openaiOptions.forceReasoning) != null ? _a : modelCapabilities.isReasoningModel;
  if ((openaiOptions == null ? void 0 : openaiOptions.conversation) && (openaiOptions == null ? void 0 : openaiOptions.previousResponseId)) {
  warnings.push({
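This hunk is the core of the fix: when the configured provider id contains "azure", request-level options are parsed under the `azure` key first, and the previous `openai` key is still honored when no `azure` options are present, so existing call sites keep working. The same decision logic as a standalone sketch (the `resolveResponsesOptions` helper is illustrative and skips the schema validation that `parseProviderOptions` performs):

```ts
// Sketch of the provider-options key selection with a backwards-compatible fallback.
type OptionsBag = Record<string, Record<string, unknown> | undefined>;

function resolveResponsesOptions(
  provider: string, // provider id from config; the fix only checks whether it contains "azure"
  providerOptions: OptionsBag | undefined,
): Record<string, unknown> | undefined {
  const providerOptionsName = provider.includes("azure") ? "azure" : "openai";
  // Prefer the provider-specific key ("azure" on Azure deployments)...
  let options = providerOptions?.[providerOptionsName];
  // ...and fall back to "openai" so pre-3.0.3 call sites keep working.
  if (options == null && providerOptionsName !== "openai") {
    options = providerOptions?.openai;
  }
  return options;
}
```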
@@ -4055,6 +4064,7 @@ var OpenAIResponsesLanguageModel = class {
  prompt,
  toolNameMapping,
  systemMessageMode: (_b = openaiOptions == null ? void 0 : openaiOptions.systemMessageMode) != null ? _b : isReasoningModel ? "developer" : modelCapabilities.systemMessageMode,
+ providerOptionsName,
  fileIdPrefixes: this.config.fileIdPrefixes,
  store: (_c = openaiOptions == null ? void 0 : openaiOptions.store) != null ? _c : true,
  hasLocalShellTool: hasOpenAITool("openai.local_shell"),
@@ -4209,7 +4219,8 @@ var OpenAIResponsesLanguageModel = class {
  },
  warnings: [...warnings, ...toolWarnings],
  store,
- toolNameMapping
+ toolNameMapping,
+ providerOptionsName
  };
  }
  async doGenerate(options) {
@@ -4218,13 +4229,13 @@ var OpenAIResponsesLanguageModel = class {
  args: body,
  warnings,
  webSearchToolName,
- toolNameMapping
+ toolNameMapping,
+ providerOptionsName
  } = await this.getArgs(options);
  const url = this.config.url({
  path: "/responses",
  modelId: this.modelId
  });
- const providerKey = this.config.provider.replace(".responses", "");
  const approvalRequestIdToDummyToolCallIdFromPrompt = extractApprovalRequestIdToToolCallIdMapping(options.prompt);
  const {
  responseHeaders,
@@ -4266,7 +4277,7 @@ var OpenAIResponsesLanguageModel = class {
  type: "reasoning",
  text: summary.text,
  providerMetadata: {
- [providerKey]: {
+ [providerOptionsName]: {
  itemId: part.id,
  reasoningEncryptedContent: (_a = part.encrypted_content) != null ? _a : null
  }
@@ -4302,7 +4313,7 @@ var OpenAIResponsesLanguageModel = class {
  action: part.action
  }),
  providerMetadata: {
- [providerKey]: {
+ [providerOptionsName]: {
  itemId: part.id
  }
  }
@@ -4320,7 +4331,7 @@ var OpenAIResponsesLanguageModel = class {
  }
  }),
  providerMetadata: {
- [providerKey]: {
+ [providerOptionsName]: {
  itemId: part.id
  }
  }
@@ -4329,7 +4340,7 @@ var OpenAIResponsesLanguageModel = class {
  }
  case "message": {
  for (const contentPart of part.content) {
- if (((_c = (_b = options.providerOptions) == null ? void 0 : _b.openai) == null ? void 0 : _c.logprobs) && contentPart.logprobs) {
+ if (((_c = (_b = options.providerOptions) == null ? void 0 : _b[providerOptionsName]) == null ? void 0 : _c.logprobs) && contentPart.logprobs) {
  logprobs.push(contentPart.logprobs);
  }
  const providerMetadata2 = {
@@ -4342,7 +4353,7 @@ var OpenAIResponsesLanguageModel = class {
  type: "text",
  text: contentPart.text,
  providerMetadata: {
- [providerKey]: providerMetadata2
+ [providerOptionsName]: providerMetadata2
  }
  });
  for (const annotation of contentPart.annotations) {
@@ -4364,7 +4375,7 @@ var OpenAIResponsesLanguageModel = class {
  filename: (_l = annotation.filename) != null ? _l : annotation.file_id,
  ...annotation.file_id ? {
  providerMetadata: {
- [providerKey]: {
+ [providerOptionsName]: {
  fileId: annotation.file_id
  }
  }
@@ -4379,7 +4390,7 @@ var OpenAIResponsesLanguageModel = class {
  title: (_q = (_p = annotation.filename) != null ? _p : annotation.file_id) != null ? _q : "Document",
  filename: (_r = annotation.filename) != null ? _r : annotation.file_id,
  providerMetadata: {
- [providerKey]: {
+ [providerOptionsName]: {
  fileId: annotation.file_id,
  containerId: annotation.container_id,
  ...annotation.index != null ? { index: annotation.index } : {}
@@ -4395,7 +4406,7 @@ var OpenAIResponsesLanguageModel = class {
  title: annotation.file_id,
  filename: annotation.file_id,
  providerMetadata: {
- [providerKey]: {
+ [providerOptionsName]: {
  fileId: annotation.file_id,
  ...annotation.index != null ? { index: annotation.index } : {}
  }
@@ -4414,7 +4425,7 @@ var OpenAIResponsesLanguageModel = class {
  toolName: part.name,
  input: part.arguments,
  providerMetadata: {
- [providerKey]: {
+ [providerOptionsName]: {
  itemId: part.id
  }
  }
@@ -4465,7 +4476,7 @@ var OpenAIResponsesLanguageModel = class {
  ...part.error != null ? { error: part.error } : {}
  },
  providerMetadata: {
- [providerKey]: {
+ [providerOptionsName]: {
  itemId: part.id
  }
  }
@@ -4569,7 +4580,7 @@ var OpenAIResponsesLanguageModel = class {
  operation: part.operation
  }),
  providerMetadata: {
- [providerKey]: {
+ [providerOptionsName]: {
  itemId: part.id
  }
  }
@@ -4579,13 +4590,13 @@ var OpenAIResponsesLanguageModel = class {
  }
  }
  const providerMetadata = {
- [providerKey]: { responseId: response.id }
+ [providerOptionsName]: { responseId: response.id }
  };
  if (logprobs.length > 0) {
- providerMetadata[providerKey].logprobs = logprobs;
+ providerMetadata[providerOptionsName].logprobs = logprobs;
  }
  if (typeof response.service_tier === "string") {
- providerMetadata[providerKey].serviceTier = response.service_tier;
+ providerMetadata[providerOptionsName].serviceTier = response.service_tier;
  }
  const usage = response.usage;
  return {
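The same key is used on the way out: in `doGenerate` (and in `doStream` below) the metadata key is no longer derived from the provider id via `replace(".responses", "")` but is the same `providerOptionsName` used for parsing options, so requests and responses agree on one key. The object assembled in the hunk above, as a small sketch (the interface name and field types are assumptions; the fields themselves come from the diff):

```ts
// Sketch of the provider metadata assembled above, keyed by providerOptionsName.
interface ResponsesProviderMetadata {
  responseId: string;
  logprobs?: unknown[]; // only when logprobs were requested and returned
  serviceTier?: string; // only when the API reports a service tier
}

function buildProviderMetadata(
  providerOptionsName: "openai" | "azure",
  responseId: string,
  logprobs: unknown[],
  serviceTier: string | undefined,
): Record<string, ResponsesProviderMetadata> {
  const providerMetadata: Record<string, ResponsesProviderMetadata> = {
    [providerOptionsName]: { responseId },
  };
  if (logprobs.length > 0) {
    providerMetadata[providerOptionsName].logprobs = logprobs;
  }
  if (serviceTier !== undefined) {
    providerMetadata[providerOptionsName].serviceTier = serviceTier;
  }
  return providerMetadata;
}
```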
@@ -4616,7 +4627,8 @@ var OpenAIResponsesLanguageModel = class {
  warnings,
  webSearchToolName,
  toolNameMapping,
- store
+ store,
+ providerOptionsName
  } = await this.getArgs(options);
  const { responseHeaders, value: response } = await (0, import_provider_utils27.postJsonToApi)({
  url: this.config.url({
@@ -4636,7 +4648,6 @@ var OpenAIResponsesLanguageModel = class {
  fetch: this.config.fetch
  });
  const self = this;
- const providerKey = this.config.provider.replace(".responses", "");
  const approvalRequestIdToDummyToolCallIdFromPrompt = extractApprovalRequestIdToToolCallIdMapping(options.prompt);
  const approvalRequestIdToDummyToolCallIdFromStream = /* @__PURE__ */ new Map();
  let finishReason = {
@@ -4802,7 +4813,7 @@ var OpenAIResponsesLanguageModel = class {
  type: "text-start",
  id: value.item.id,
  providerMetadata: {
- [providerKey]: {
+ [providerOptionsName]: {
  itemId: value.item.id
  }
  }
@@ -4816,7 +4827,7 @@ var OpenAIResponsesLanguageModel = class {
  type: "reasoning-start",
  id: `${value.item.id}:0`,
  providerMetadata: {
- [providerKey]: {
+ [providerOptionsName]: {
  itemId: value.item.id,
  reasoningEncryptedContent: (_a = value.item.encrypted_content) != null ? _a : null
  }
@@ -4829,7 +4840,7 @@ var OpenAIResponsesLanguageModel = class {
  type: "text-end",
  id: value.item.id,
  providerMetadata: {
- [providerKey]: {
+ [providerOptionsName]: {
  itemId: value.item.id,
  ...ongoingAnnotations.length > 0 && {
  annotations: ongoingAnnotations
@@ -4850,7 +4861,7 @@ var OpenAIResponsesLanguageModel = class {
  toolName: value.item.name,
  input: value.item.arguments,
  providerMetadata: {
- [providerKey]: {
+ [providerOptionsName]: {
  itemId: value.item.id
  }
  }
@@ -4951,7 +4962,7 @@ var OpenAIResponsesLanguageModel = class {
  ...value.item.error != null ? { error: value.item.error } : {}
  },
  providerMetadata: {
- [providerKey]: {
+ [providerOptionsName]: {
  itemId: value.item.id
  }
  }
@@ -4989,7 +5000,7 @@ var OpenAIResponsesLanguageModel = class {
  operation: value.item.operation
  }),
  providerMetadata: {
- [providerKey]: {
+ [providerOptionsName]: {
  itemId: value.item.id
  }
  }
@@ -5035,7 +5046,7 @@ var OpenAIResponsesLanguageModel = class {
  }
  }),
  providerMetadata: {
- [providerKey]: { itemId: value.item.id }
+ [providerOptionsName]: { itemId: value.item.id }
  }
  });
  } else if (value.item.type === "shell_call") {
@@ -5050,7 +5061,7 @@ var OpenAIResponsesLanguageModel = class {
  }
  }),
  providerMetadata: {
- [providerKey]: { itemId: value.item.id }
+ [providerOptionsName]: { itemId: value.item.id }
  }
  });
  } else if (value.item.type === "reasoning") {
@@ -5065,7 +5076,7 @@ var OpenAIResponsesLanguageModel = class {
  type: "reasoning-end",
  id: `${value.item.id}:${summaryIndex}`,
  providerMetadata: {
- [providerKey]: {
+ [providerOptionsName]: {
  itemId: value.item.id,
  reasoningEncryptedContent: (_k = value.item.encrypted_content) != null ? _k : null
  }
@@ -5171,7 +5182,7 @@ var OpenAIResponsesLanguageModel = class {
  id: value.item_id,
  delta: value.delta
  });
- if (((_m = (_l = options.providerOptions) == null ? void 0 : _l.openai) == null ? void 0 : _m.logprobs) && value.logprobs) {
+ if (((_m = (_l = options.providerOptions) == null ? void 0 : _l[providerOptionsName]) == null ? void 0 : _m.logprobs) && value.logprobs) {
  logprobs.push(value.logprobs);
  }
  } else if (value.type === "response.reasoning_summary_part.added") {
@@ -5186,7 +5197,7 @@ var OpenAIResponsesLanguageModel = class {
  type: "reasoning-end",
  id: `${value.item_id}:${summaryIndex}`,
  providerMetadata: {
- [providerKey]: { itemId: value.item_id }
+ [providerOptionsName]: { itemId: value.item_id }
  }
  });
  activeReasoningPart.summaryParts[summaryIndex] = "concluded";
@@ -5196,7 +5207,7 @@ var OpenAIResponsesLanguageModel = class {
  type: "reasoning-start",
  id: `${value.item_id}:${value.summary_index}`,
  providerMetadata: {
- [providerKey]: {
+ [providerOptionsName]: {
  itemId: value.item_id,
  reasoningEncryptedContent: (_o = (_n = activeReasoning[value.item_id]) == null ? void 0 : _n.encryptedContent) != null ? _o : null
  }
@@ -5209,7 +5220,7 @@ var OpenAIResponsesLanguageModel = class {
  id: `${value.item_id}:${value.summary_index}`,
  delta: value.delta,
  providerMetadata: {
- [providerKey]: {
+ [providerOptionsName]: {
  itemId: value.item_id
  }
  }
@@ -5220,7 +5231,7 @@ var OpenAIResponsesLanguageModel = class {
  type: "reasoning-end",
  id: `${value.item_id}:${value.summary_index}`,
  providerMetadata: {
- [providerKey]: { itemId: value.item_id }
+ [providerOptionsName]: { itemId: value.item_id }
  }
  });
  activeReasoning[value.item_id].summaryParts[value.summary_index] = "concluded";
@@ -5259,7 +5270,7 @@ var OpenAIResponsesLanguageModel = class {
  filename: (_A = value.annotation.filename) != null ? _A : value.annotation.file_id,
  ...value.annotation.file_id ? {
  providerMetadata: {
- [providerKey]: {
+ [providerOptionsName]: {
  fileId: value.annotation.file_id
  }
  }
@@ -5274,7 +5285,7 @@ var OpenAIResponsesLanguageModel = class {
  title: (_F = (_E = value.annotation.filename) != null ? _E : value.annotation.file_id) != null ? _F : "Document",
  filename: (_G = value.annotation.filename) != null ? _G : value.annotation.file_id,
  providerMetadata: {
- [providerKey]: {
+ [providerOptionsName]: {
  fileId: value.annotation.file_id,
  containerId: value.annotation.container_id,
  ...value.annotation.index != null ? { index: value.annotation.index } : {}
@@ -5290,7 +5301,7 @@ var OpenAIResponsesLanguageModel = class {
  title: value.annotation.file_id,
  filename: value.annotation.file_id,
  providerMetadata: {
- [providerKey]: {
+ [providerOptionsName]: {
  fileId: value.annotation.file_id,
  ...value.annotation.index != null ? { index: value.annotation.index } : {}
  }
@@ -5303,15 +5314,15 @@ var OpenAIResponsesLanguageModel = class {
  },
  flush(controller) {
  const providerMetadata = {
- [providerKey]: {
+ [providerOptionsName]: {
  responseId
  }
  };
  if (logprobs.length > 0) {
- providerMetadata[providerKey].logprobs = logprobs;
+ providerMetadata[providerOptionsName].logprobs = logprobs;
  }
  if (serviceTier !== void 0) {
- providerMetadata[providerKey].serviceTier = serviceTier;
+ providerMetadata[providerOptionsName].serviceTier = serviceTier;
  }
  controller.enqueue({
  type: "finish",
@@ -5744,7 +5755,7 @@ var OpenAITranscriptionModel = class {
  };

  // src/version.ts
- var VERSION = true ? "3.0.1" : "0.0.0-test";
+ var VERSION = true ? "3.0.3" : "0.0.0-test";

  // src/openai-provider.ts
  function createOpenAI(options = {}) {