@ai-sdk/openai 3.0.0-beta.65 → 3.0.0-beta.66

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -3908,6 +3908,7 @@ var OpenAIResponsesLanguageModel = class {
  path: "/responses",
  modelId: this.modelId
  });
+ const providerKey = this.config.provider.replace(".responses", "");
  const {
  responseHeaders,
  value: response,
@@ -3948,7 +3949,7 @@ var OpenAIResponsesLanguageModel = class {
  type: "reasoning",
  text: summary.text,
  providerMetadata: {
- openai: {
+ [providerKey]: {
  itemId: part.id,
  reasoningEncryptedContent: (_a = part.encrypted_content) != null ? _a : null
  }
@@ -3984,7 +3985,7 @@ var OpenAIResponsesLanguageModel = class {
  action: part.action
  }),
  providerMetadata: {
- openai: {
+ [providerKey]: {
  itemId: part.id
  }
  }
@@ -4006,7 +4007,7 @@ var OpenAIResponsesLanguageModel = class {
  type: "text",
  text: contentPart.text,
  providerMetadata: {
- openai: providerMetadata2
+ [providerKey]: providerMetadata2
  }
  });
  for (const annotation of contentPart.annotations) {
@@ -4028,7 +4029,7 @@ var OpenAIResponsesLanguageModel = class {
  filename: (_l = annotation.filename) != null ? _l : annotation.file_id,
  ...annotation.file_id ? {
  providerMetadata: {
- openai: {
+ [providerKey]: {
  fileId: annotation.file_id
  }
  }
@@ -4043,7 +4044,7 @@ var OpenAIResponsesLanguageModel = class {
  title: (_q = (_p = annotation.filename) != null ? _p : annotation.file_id) != null ? _q : "Document",
  filename: (_r = annotation.filename) != null ? _r : annotation.file_id,
  providerMetadata: {
- openai: {
+ [providerKey]: {
  fileId: annotation.file_id,
  containerId: annotation.container_id,
  ...annotation.index != null ? { index: annotation.index } : {}
@@ -4059,7 +4060,7 @@ var OpenAIResponsesLanguageModel = class {
  title: annotation.file_id,
  filename: annotation.file_id,
  providerMetadata: {
- openai: {
+ [providerKey]: {
  fileId: annotation.file_id,
  ...annotation.index != null ? { index: annotation.index } : {}
  }
@@ -4078,7 +4079,7 @@ var OpenAIResponsesLanguageModel = class {
  toolName: part.name,
  input: part.arguments,
  providerMetadata: {
- openai: {
+ [providerKey]: {
  itemId: part.id
  }
  }
@@ -4243,13 +4244,13 @@ var OpenAIResponsesLanguageModel = class {
  }
  }
  const providerMetadata = {
- openai: { responseId: response.id }
+ [providerKey]: { responseId: response.id }
  };
  if (logprobs.length > 0) {
- providerMetadata.openai.logprobs = logprobs;
+ providerMetadata[providerKey].logprobs = logprobs;
  }
  if (typeof response.service_tier === "string") {
- providerMetadata.openai.serviceTier = response.service_tier;
+ providerMetadata[providerKey].serviceTier = response.service_tier;
  }
  const usage = response.usage;
  return {
@@ -4302,6 +4303,7 @@ var OpenAIResponsesLanguageModel = class {
  fetch: this.config.fetch
  });
  const self = this;
+ const providerKey = this.config.provider.replace(".responses", "");
  let finishReason = "unknown";
  const usage = {
  inputTokens: void 0,
@@ -4425,7 +4427,7 @@ var OpenAIResponsesLanguageModel = class {
  type: "text-start",
  id: value.item.id,
  providerMetadata: {
- openai: {
+ [providerKey]: {
  itemId: value.item.id
  }
  }
@@ -4439,7 +4441,7 @@ var OpenAIResponsesLanguageModel = class {
  type: "reasoning-start",
  id: `${value.item.id}:0`,
  providerMetadata: {
- openai: {
+ [providerKey]: {
  itemId: value.item.id,
  reasoningEncryptedContent: (_a = value.item.encrypted_content) != null ? _a : null
  }
@@ -4460,7 +4462,7 @@ var OpenAIResponsesLanguageModel = class {
  toolName: value.item.name,
  input: value.item.arguments,
  providerMetadata: {
- openai: {
+ [providerKey]: {
  itemId: value.item.id
  }
  }
@@ -4598,7 +4600,7 @@ var OpenAIResponsesLanguageModel = class {
  }
  }),
  providerMetadata: {
- openai: { itemId: value.item.id }
+ [providerKey]: { itemId: value.item.id }
  }
  });
  } else if (value.item.type === "reasoning") {
@@ -4613,7 +4615,7 @@ var OpenAIResponsesLanguageModel = class {
  type: "reasoning-end",
  id: `${value.item.id}:${summaryIndex}`,
  providerMetadata: {
- openai: {
+ [providerKey]: {
  itemId: value.item.id,
  reasoningEncryptedContent: (_d = value.item.encrypted_content) != null ? _d : null
  }
@@ -4703,7 +4705,9 @@ var OpenAIResponsesLanguageModel = class {
  controller.enqueue({
  type: "reasoning-end",
  id: `${value.item_id}:${summaryIndex}`,
- providerMetadata: { openai: { itemId: value.item_id } }
+ providerMetadata: {
+ [providerKey]: { itemId: value.item_id }
+ }
  });
  activeReasoningPart.summaryParts[summaryIndex] = "concluded";
  }
@@ -4712,7 +4716,7 @@ var OpenAIResponsesLanguageModel = class {
  type: "reasoning-start",
  id: `${value.item_id}:${value.summary_index}`,
  providerMetadata: {
- openai: {
+ [providerKey]: {
  itemId: value.item_id,
  reasoningEncryptedContent: (_h = (_g = activeReasoning[value.item_id]) == null ? void 0 : _g.encryptedContent) != null ? _h : null
  }
@@ -4725,7 +4729,7 @@ var OpenAIResponsesLanguageModel = class {
  id: `${value.item_id}:${value.summary_index}`,
  delta: value.delta,
  providerMetadata: {
- openai: {
+ [providerKey]: {
  itemId: value.item_id
  }
  }
@@ -4736,7 +4740,7 @@ var OpenAIResponsesLanguageModel = class {
  type: "reasoning-end",
  id: `${value.item_id}:${value.summary_index}`,
  providerMetadata: {
- openai: { itemId: value.item_id }
+ [providerKey]: { itemId: value.item_id }
  }
  });
  activeReasoning[value.item_id].summaryParts[value.summary_index] = "concluded";
@@ -4776,7 +4780,7 @@ var OpenAIResponsesLanguageModel = class {
  filename: (_v = value.annotation.filename) != null ? _v : value.annotation.file_id,
  ...value.annotation.file_id ? {
  providerMetadata: {
- openai: {
+ [providerKey]: {
  fileId: value.annotation.file_id
  }
  }
@@ -4791,7 +4795,7 @@ var OpenAIResponsesLanguageModel = class {
  title: (_A = (_z = value.annotation.filename) != null ? _z : value.annotation.file_id) != null ? _A : "Document",
  filename: (_B = value.annotation.filename) != null ? _B : value.annotation.file_id,
  providerMetadata: {
- openai: {
+ [providerKey]: {
  fileId: value.annotation.file_id,
  containerId: value.annotation.container_id,
  ...value.annotation.index != null ? { index: value.annotation.index } : {}
@@ -4807,7 +4811,7 @@ var OpenAIResponsesLanguageModel = class {
  title: value.annotation.file_id,
  filename: value.annotation.file_id,
  providerMetadata: {
- openai: {
+ [providerKey]: {
  fileId: value.annotation.file_id,
  ...value.annotation.index != null ? { index: value.annotation.index } : {}
  }
@@ -4819,7 +4823,7 @@ var OpenAIResponsesLanguageModel = class {
  type: "text-end",
  id: value.item.id,
  providerMetadata: {
- openai: {
+ [providerKey]: {
  itemId: value.item.id,
  ...ongoingAnnotations.length > 0 && {
  annotations: ongoingAnnotations
@@ -4833,15 +4837,15 @@ var OpenAIResponsesLanguageModel = class {
  },
  flush(controller) {
  const providerMetadata = {
- openai: {
+ [providerKey]: {
  responseId
  }
  };
  if (logprobs.length > 0) {
- providerMetadata.openai.logprobs = logprobs;
+ providerMetadata[providerKey].logprobs = logprobs;
  }
  if (serviceTier !== void 0) {
- providerMetadata.openai.serviceTier = serviceTier;
+ providerMetadata[providerKey].serviceTier = serviceTier;
  }
  controller.enqueue({
  type: "finish",