@ai-sdk/openai 3.0.0-beta.65 → 3.0.0-beta.66

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -3947,6 +3947,7 @@ var OpenAIResponsesLanguageModel = class {
  path: "/responses",
  modelId: this.modelId
  });
+ const providerKey = this.config.provider.replace(".responses", "");
  const {
  responseHeaders,
  value: response,
@@ -3987,7 +3988,7 @@ var OpenAIResponsesLanguageModel = class {
  type: "reasoning",
  text: summary.text,
  providerMetadata: {
- openai: {
+ [providerKey]: {
  itemId: part.id,
  reasoningEncryptedContent: (_a = part.encrypted_content) != null ? _a : null
  }
@@ -4023,7 +4024,7 @@ var OpenAIResponsesLanguageModel = class {
  action: part.action
  }),
  providerMetadata: {
- openai: {
+ [providerKey]: {
  itemId: part.id
  }
  }
@@ -4045,7 +4046,7 @@ var OpenAIResponsesLanguageModel = class {
  type: "text",
  text: contentPart.text,
  providerMetadata: {
- openai: providerMetadata2
+ [providerKey]: providerMetadata2
  }
  });
  for (const annotation of contentPart.annotations) {
@@ -4067,7 +4068,7 @@ var OpenAIResponsesLanguageModel = class {
  filename: (_l = annotation.filename) != null ? _l : annotation.file_id,
  ...annotation.file_id ? {
  providerMetadata: {
- openai: {
+ [providerKey]: {
  fileId: annotation.file_id
  }
  }
@@ -4082,7 +4083,7 @@ var OpenAIResponsesLanguageModel = class {
  title: (_q = (_p = annotation.filename) != null ? _p : annotation.file_id) != null ? _q : "Document",
  filename: (_r = annotation.filename) != null ? _r : annotation.file_id,
  providerMetadata: {
- openai: {
+ [providerKey]: {
  fileId: annotation.file_id,
  containerId: annotation.container_id,
  ...annotation.index != null ? { index: annotation.index } : {}
@@ -4098,7 +4099,7 @@ var OpenAIResponsesLanguageModel = class {
  title: annotation.file_id,
  filename: annotation.file_id,
  providerMetadata: {
- openai: {
+ [providerKey]: {
  fileId: annotation.file_id,
  ...annotation.index != null ? { index: annotation.index } : {}
  }
@@ -4117,7 +4118,7 @@ var OpenAIResponsesLanguageModel = class {
  toolName: part.name,
  input: part.arguments,
  providerMetadata: {
- openai: {
+ [providerKey]: {
  itemId: part.id
  }
  }
@@ -4282,13 +4283,13 @@ var OpenAIResponsesLanguageModel = class {
  }
  }
  const providerMetadata = {
- openai: { responseId: response.id }
+ [providerKey]: { responseId: response.id }
  };
  if (logprobs.length > 0) {
- providerMetadata.openai.logprobs = logprobs;
+ providerMetadata[providerKey].logprobs = logprobs;
  }
  if (typeof response.service_tier === "string") {
- providerMetadata.openai.serviceTier = response.service_tier;
+ providerMetadata[providerKey].serviceTier = response.service_tier;
  }
  const usage = response.usage;
  return {
@@ -4341,6 +4342,7 @@ var OpenAIResponsesLanguageModel = class {
  fetch: this.config.fetch
  });
  const self = this;
+ const providerKey = this.config.provider.replace(".responses", "");
  let finishReason = "unknown";
  const usage = {
  inputTokens: void 0,
@@ -4464,7 +4466,7 @@ var OpenAIResponsesLanguageModel = class {
  type: "text-start",
  id: value.item.id,
  providerMetadata: {
- openai: {
+ [providerKey]: {
  itemId: value.item.id
  }
  }
@@ -4478,7 +4480,7 @@ var OpenAIResponsesLanguageModel = class {
  type: "reasoning-start",
  id: `${value.item.id}:0`,
  providerMetadata: {
- openai: {
+ [providerKey]: {
  itemId: value.item.id,
  reasoningEncryptedContent: (_a = value.item.encrypted_content) != null ? _a : null
  }
@@ -4499,7 +4501,7 @@ var OpenAIResponsesLanguageModel = class {
  toolName: value.item.name,
  input: value.item.arguments,
  providerMetadata: {
- openai: {
+ [providerKey]: {
  itemId: value.item.id
  }
  }
@@ -4637,7 +4639,7 @@ var OpenAIResponsesLanguageModel = class {
  }
  }),
  providerMetadata: {
- openai: { itemId: value.item.id }
+ [providerKey]: { itemId: value.item.id }
  }
  });
  } else if (value.item.type === "reasoning") {
@@ -4652,7 +4654,7 @@ var OpenAIResponsesLanguageModel = class {
  type: "reasoning-end",
  id: `${value.item.id}:${summaryIndex}`,
  providerMetadata: {
- openai: {
+ [providerKey]: {
  itemId: value.item.id,
  reasoningEncryptedContent: (_d = value.item.encrypted_content) != null ? _d : null
  }
@@ -4742,7 +4744,9 @@ var OpenAIResponsesLanguageModel = class {
  controller.enqueue({
  type: "reasoning-end",
  id: `${value.item_id}:${summaryIndex}`,
- providerMetadata: { openai: { itemId: value.item_id } }
+ providerMetadata: {
+ [providerKey]: { itemId: value.item_id }
+ }
  });
  activeReasoningPart.summaryParts[summaryIndex] = "concluded";
  }
@@ -4751,7 +4755,7 @@ var OpenAIResponsesLanguageModel = class {
  type: "reasoning-start",
  id: `${value.item_id}:${value.summary_index}`,
  providerMetadata: {
- openai: {
+ [providerKey]: {
  itemId: value.item_id,
  reasoningEncryptedContent: (_h = (_g = activeReasoning[value.item_id]) == null ? void 0 : _g.encryptedContent) != null ? _h : null
  }
@@ -4764,7 +4768,7 @@ var OpenAIResponsesLanguageModel = class {
  id: `${value.item_id}:${value.summary_index}`,
  delta: value.delta,
  providerMetadata: {
- openai: {
+ [providerKey]: {
  itemId: value.item_id
  }
  }
@@ -4775,7 +4779,7 @@ var OpenAIResponsesLanguageModel = class {
  type: "reasoning-end",
  id: `${value.item_id}:${value.summary_index}`,
  providerMetadata: {
- openai: { itemId: value.item_id }
+ [providerKey]: { itemId: value.item_id }
  }
  });
  activeReasoning[value.item_id].summaryParts[value.summary_index] = "concluded";
@@ -4815,7 +4819,7 @@ var OpenAIResponsesLanguageModel = class {
  filename: (_v = value.annotation.filename) != null ? _v : value.annotation.file_id,
  ...value.annotation.file_id ? {
  providerMetadata: {
- openai: {
+ [providerKey]: {
  fileId: value.annotation.file_id
  }
  }
@@ -4830,7 +4834,7 @@ var OpenAIResponsesLanguageModel = class {
  title: (_A = (_z = value.annotation.filename) != null ? _z : value.annotation.file_id) != null ? _A : "Document",
  filename: (_B = value.annotation.filename) != null ? _B : value.annotation.file_id,
  providerMetadata: {
- openai: {
+ [providerKey]: {
  fileId: value.annotation.file_id,
  containerId: value.annotation.container_id,
  ...value.annotation.index != null ? { index: value.annotation.index } : {}
@@ -4846,7 +4850,7 @@ var OpenAIResponsesLanguageModel = class {
  title: value.annotation.file_id,
  filename: value.annotation.file_id,
  providerMetadata: {
- openai: {
+ [providerKey]: {
  fileId: value.annotation.file_id,
  ...value.annotation.index != null ? { index: value.annotation.index } : {}
  }
@@ -4858,7 +4862,7 @@ var OpenAIResponsesLanguageModel = class {
  type: "text-end",
  id: value.item.id,
  providerMetadata: {
- openai: {
+ [providerKey]: {
  itemId: value.item.id,
  ...ongoingAnnotations.length > 0 && {
  annotations: ongoingAnnotations
@@ -4872,15 +4876,15 @@ var OpenAIResponsesLanguageModel = class {
  },
  flush(controller) {
  const providerMetadata = {
- openai: {
+ [providerKey]: {
  responseId
  }
  };
  if (logprobs.length > 0) {
- providerMetadata.openai.logprobs = logprobs;
+ providerMetadata[providerKey].logprobs = logprobs;
  }
  if (serviceTier !== void 0) {
- providerMetadata.openai.serviceTier = serviceTier;
+ providerMetadata[providerKey].serviceTier = serviceTier;
  }
  controller.enqueue({
  type: "finish",