@ai-sdk/openai 3.0.0-beta.65 → 3.0.0-beta.66

package/dist/index.mjs CHANGED
@@ -3683,6 +3683,7 @@ var OpenAIResponsesLanguageModel = class {
  path: "/responses",
  modelId: this.modelId
  });
+ const providerKey = this.config.provider.replace(".responses", "");
  const {
  responseHeaders,
  value: response,
@@ -3723,7 +3724,7 @@ var OpenAIResponsesLanguageModel = class {
  type: "reasoning",
  text: summary.text,
  providerMetadata: {
- openai: {
+ [providerKey]: {
  itemId: part.id,
  reasoningEncryptedContent: (_a = part.encrypted_content) != null ? _a : null
  }
@@ -3759,7 +3760,7 @@ var OpenAIResponsesLanguageModel = class {
  action: part.action
  }),
  providerMetadata: {
- openai: {
+ [providerKey]: {
  itemId: part.id
  }
  }
@@ -3781,7 +3782,7 @@ var OpenAIResponsesLanguageModel = class {
  type: "text",
  text: contentPart.text,
  providerMetadata: {
- openai: providerMetadata2
+ [providerKey]: providerMetadata2
  }
  });
  for (const annotation of contentPart.annotations) {
@@ -3803,7 +3804,7 @@ var OpenAIResponsesLanguageModel = class {
  filename: (_l = annotation.filename) != null ? _l : annotation.file_id,
  ...annotation.file_id ? {
  providerMetadata: {
- openai: {
+ [providerKey]: {
  fileId: annotation.file_id
  }
  }
@@ -3818,7 +3819,7 @@ var OpenAIResponsesLanguageModel = class {
  title: (_q = (_p = annotation.filename) != null ? _p : annotation.file_id) != null ? _q : "Document",
  filename: (_r = annotation.filename) != null ? _r : annotation.file_id,
  providerMetadata: {
- openai: {
+ [providerKey]: {
  fileId: annotation.file_id,
  containerId: annotation.container_id,
  ...annotation.index != null ? { index: annotation.index } : {}
@@ -3834,7 +3835,7 @@ var OpenAIResponsesLanguageModel = class {
  title: annotation.file_id,
  filename: annotation.file_id,
  providerMetadata: {
- openai: {
+ [providerKey]: {
  fileId: annotation.file_id,
  ...annotation.index != null ? { index: annotation.index } : {}
  }
@@ -3853,7 +3854,7 @@ var OpenAIResponsesLanguageModel = class {
  toolName: part.name,
  input: part.arguments,
  providerMetadata: {
- openai: {
+ [providerKey]: {
  itemId: part.id
  }
  }
@@ -4018,13 +4019,13 @@ var OpenAIResponsesLanguageModel = class {
  }
  }
  const providerMetadata = {
- openai: { responseId: response.id }
+ [providerKey]: { responseId: response.id }
  };
  if (logprobs.length > 0) {
- providerMetadata.openai.logprobs = logprobs;
+ providerMetadata[providerKey].logprobs = logprobs;
  }
  if (typeof response.service_tier === "string") {
- providerMetadata.openai.serviceTier = response.service_tier;
+ providerMetadata[providerKey].serviceTier = response.service_tier;
  }
  const usage = response.usage;
  return {
@@ -4077,6 +4078,7 @@ var OpenAIResponsesLanguageModel = class {
  fetch: this.config.fetch
  });
  const self = this;
+ const providerKey = this.config.provider.replace(".responses", "");
  let finishReason = "unknown";
  const usage = {
  inputTokens: void 0,
@@ -4200,7 +4202,7 @@ var OpenAIResponsesLanguageModel = class {
  type: "text-start",
  id: value.item.id,
  providerMetadata: {
- openai: {
+ [providerKey]: {
  itemId: value.item.id
  }
  }
@@ -4214,7 +4216,7 @@ var OpenAIResponsesLanguageModel = class {
  type: "reasoning-start",
  id: `${value.item.id}:0`,
  providerMetadata: {
- openai: {
+ [providerKey]: {
  itemId: value.item.id,
  reasoningEncryptedContent: (_a = value.item.encrypted_content) != null ? _a : null
  }
@@ -4235,7 +4237,7 @@ var OpenAIResponsesLanguageModel = class {
  toolName: value.item.name,
  input: value.item.arguments,
  providerMetadata: {
- openai: {
+ [providerKey]: {
  itemId: value.item.id
  }
  }
@@ -4373,7 +4375,7 @@ var OpenAIResponsesLanguageModel = class {
  }
  }),
  providerMetadata: {
- openai: { itemId: value.item.id }
+ [providerKey]: { itemId: value.item.id }
  }
  });
  } else if (value.item.type === "reasoning") {
@@ -4388,7 +4390,7 @@ var OpenAIResponsesLanguageModel = class {
  type: "reasoning-end",
  id: `${value.item.id}:${summaryIndex}`,
  providerMetadata: {
- openai: {
+ [providerKey]: {
  itemId: value.item.id,
  reasoningEncryptedContent: (_d = value.item.encrypted_content) != null ? _d : null
  }
@@ -4478,7 +4480,9 @@ var OpenAIResponsesLanguageModel = class {
  controller.enqueue({
  type: "reasoning-end",
  id: `${value.item_id}:${summaryIndex}`,
- providerMetadata: { openai: { itemId: value.item_id } }
+ providerMetadata: {
+ [providerKey]: { itemId: value.item_id }
+ }
  });
  activeReasoningPart.summaryParts[summaryIndex] = "concluded";
  }
@@ -4487,7 +4491,7 @@ var OpenAIResponsesLanguageModel = class {
  type: "reasoning-start",
  id: `${value.item_id}:${value.summary_index}`,
  providerMetadata: {
- openai: {
+ [providerKey]: {
  itemId: value.item_id,
  reasoningEncryptedContent: (_h = (_g = activeReasoning[value.item_id]) == null ? void 0 : _g.encryptedContent) != null ? _h : null
  }
@@ -4500,7 +4504,7 @@ var OpenAIResponsesLanguageModel = class {
  id: `${value.item_id}:${value.summary_index}`,
  delta: value.delta,
  providerMetadata: {
- openai: {
+ [providerKey]: {
  itemId: value.item_id
  }
  }
@@ -4511,7 +4515,7 @@ var OpenAIResponsesLanguageModel = class {
  type: "reasoning-end",
  id: `${value.item_id}:${value.summary_index}`,
  providerMetadata: {
- openai: { itemId: value.item_id }
+ [providerKey]: { itemId: value.item_id }
  }
  });
  activeReasoning[value.item_id].summaryParts[value.summary_index] = "concluded";
@@ -4551,7 +4555,7 @@ var OpenAIResponsesLanguageModel = class {
  filename: (_v = value.annotation.filename) != null ? _v : value.annotation.file_id,
  ...value.annotation.file_id ? {
  providerMetadata: {
- openai: {
+ [providerKey]: {
  fileId: value.annotation.file_id
  }
  }
@@ -4566,7 +4570,7 @@ var OpenAIResponsesLanguageModel = class {
  title: (_A = (_z = value.annotation.filename) != null ? _z : value.annotation.file_id) != null ? _A : "Document",
  filename: (_B = value.annotation.filename) != null ? _B : value.annotation.file_id,
  providerMetadata: {
- openai: {
+ [providerKey]: {
  fileId: value.annotation.file_id,
  containerId: value.annotation.container_id,
  ...value.annotation.index != null ? { index: value.annotation.index } : {}
@@ -4582,7 +4586,7 @@ var OpenAIResponsesLanguageModel = class {
  title: value.annotation.file_id,
  filename: value.annotation.file_id,
  providerMetadata: {
- openai: {
+ [providerKey]: {
  fileId: value.annotation.file_id,
  ...value.annotation.index != null ? { index: value.annotation.index } : {}
  }
@@ -4594,7 +4598,7 @@ var OpenAIResponsesLanguageModel = class {
  type: "text-end",
  id: value.item.id,
  providerMetadata: {
- openai: {
+ [providerKey]: {
  itemId: value.item.id,
  ...ongoingAnnotations.length > 0 && {
  annotations: ongoingAnnotations
@@ -4608,15 +4612,15 @@ var OpenAIResponsesLanguageModel = class {
  },
  flush(controller) {
  const providerMetadata = {
- openai: {
+ [providerKey]: {
  responseId
  }
  };
  if (logprobs.length > 0) {
- providerMetadata.openai.logprobs = logprobs;
+ providerMetadata[providerKey].logprobs = logprobs;
  }
  if (serviceTier !== void 0) {
- providerMetadata.openai.serviceTier = serviceTier;
+ providerMetadata[providerKey].serviceTier = serviceTier;
  }
  controller.enqueue({
  type: "finish",
@@ -5060,7 +5064,7 @@ var OpenAITranscriptionModel = class {
  };

  // src/version.ts
- var VERSION = true ? "3.0.0-beta.65" : "0.0.0-test";
+ var VERSION = true ? "3.0.0-beta.66" : "0.0.0-test";

  // src/openai-provider.ts
  function createOpenAI(options = {}) {
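
For context: each providerMetadata hunk above replaces the hardcoded openai key with a computed providerKey, obtained by stripping the ".responses" suffix from the configured provider name. The TypeScript sketch below only illustrates that derivation; the provider strings ("openai.responses", "azure.responses") and the sample metadata values are assumptions for illustration, not values taken from this diff.

// Sketch only: how the providerKey introduced in this diff is computed and used.
// Provider names and sample values below are illustrative assumptions.
const deriveProviderKey = (provider: string): string =>
  provider.replace(".responses", "");

const openaiKey = deriveProviderKey("openai.responses"); // "openai"
const azureKey = deriveProviderKey("azure.responses");   // "azure" (assumed provider name)

// Metadata is then keyed with a computed property name, mirroring the
// `- openai:` / `+ [providerKey]:` replacements in the hunks above.
const providerMetadata: Record<string, Record<string, unknown>> = {
  [openaiKey]: { responseId: "resp_abc123" }, // responseId field appears in the diff
};
providerMetadata[openaiKey].serviceTier = "default"; // optional field, as in the diff
console.log(providerMetadata, azureKey);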