@ai-sdk/openai 3.0.0-beta.50 → 3.0.0-beta.51

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -2718,6 +2718,20 @@ var openaiResponsesChunkSchema = lazySchema12(
  start_index: z14.number().nullish(),
  end_index: z14.number().nullish(),
  quote: z14.string().nullish()
+ }),
+ z14.object({
+ type: z14.literal("container_file_citation"),
+ container_id: z14.string(),
+ file_id: z14.string(),
+ filename: z14.string().nullish(),
+ start_index: z14.number().nullish(),
+ end_index: z14.number().nullish(),
+ index: z14.number().nullish()
+ }),
+ z14.object({
+ type: z14.literal("file_path"),
+ file_id: z14.string(),
+ index: z14.number().nullish()
  })
  ])
  }),
@@ -2803,7 +2817,18 @@ var openaiResponsesResponseSchema = lazySchema12(
  quote: z14.string().nullish()
  }),
  z14.object({
- type: z14.literal("container_file_citation")
+ type: z14.literal("container_file_citation"),
+ container_id: z14.string(),
+ file_id: z14.string(),
+ filename: z14.string().nullish(),
+ start_index: z14.number().nullish(),
+ end_index: z14.number().nullish(),
+ index: z14.number().nullish()
+ }),
+ z14.object({
+ type: z14.literal("file_path"),
+ file_id: z14.string(),
+ index: z14.number().nullish()
  })
  ])
  )
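
Note: the two hunks above widen the annotation union in both the streaming chunk schema and the response schema, so "container_file_citation" annotations now carry their container/file fields (previously only the type literal was accepted) and a new "file_path" variant is recognized. A minimal standalone sketch of the newly accepted shapes, using plain zod imports instead of the bundle's z14 alias (the schema variable names here are illustrative only, not part of the package):

    import { z } from "zod";

    // Sketch of the two annotation variants added above; field names mirror the diff.
    const containerFileCitation = z.object({
      type: z.literal("container_file_citation"),
      container_id: z.string(),
      file_id: z.string(),
      filename: z.string().nullish(),
      start_index: z.number().nullish(),
      end_index: z.number().nullish(),
      index: z.number().nullish()
    });

    const filePath = z.object({
      type: z.literal("file_path"),
      file_id: z.string(),
      index: z.number().nullish()
    });

    // Example payloads (values made up) that the widened union now parses:
    containerFileCitation.parse({
      type: "container_file_citation",
      container_id: "cntr_123",
      file_id: "cfile_456",
      filename: "report.csv",
      start_index: 10,
      end_index: 42
    });
    filePath.parse({ type: "file_path", file_id: "cfile_456" });
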
@@ -3641,7 +3666,7 @@ var OpenAIResponsesLanguageModel = class {
  };
  }
  async doGenerate(options) {
- var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p, _q, _r, _s;
+ var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p, _q, _r, _s, _t, _u, _v, _w, _x, _y, _z, _A, _B;
  const {
  args: body,
  warnings,
@@ -3739,13 +3764,17 @@ var OpenAIResponsesLanguageModel = class {
  if (((_c = (_b = options.providerOptions) == null ? void 0 : _b.openai) == null ? void 0 : _c.logprobs) && contentPart.logprobs) {
  logprobs.push(contentPart.logprobs);
  }
+ const providerMetadata2 = {
+ itemId: part.id,
+ ...contentPart.annotations.length > 0 && {
+ annotations: contentPart.annotations
+ }
+ };
  content.push({
  type: "text",
  text: contentPart.text,
  providerMetadata: {
- openai: {
- itemId: part.id
- }
+ openai: providerMetadata2
  }
  });
  for (const annotation of contentPart.annotations) {
@@ -3773,6 +3802,37 @@ var OpenAIResponsesLanguageModel = class {
  }
  } : {}
  });
+ } else if (annotation.type === "container_file_citation") {
+ content.push({
+ type: "source",
+ sourceType: "document",
+ id: (_o = (_n = (_m = this.config).generateId) == null ? void 0 : _n.call(_m)) != null ? _o : generateId2(),
+ mediaType: "text/plain",
+ title: (_q = (_p = annotation.filename) != null ? _p : annotation.file_id) != null ? _q : "Document",
+ filename: (_r = annotation.filename) != null ? _r : annotation.file_id,
+ providerMetadata: {
+ openai: {
+ fileId: annotation.file_id,
+ containerId: annotation.container_id,
+ ...annotation.index != null ? { index: annotation.index } : {}
+ }
+ }
+ });
+ } else if (annotation.type === "file_path") {
+ content.push({
+ type: "source",
+ sourceType: "document",
+ id: (_u = (_t = (_s = this.config).generateId) == null ? void 0 : _t.call(_s)) != null ? _u : generateId2(),
+ mediaType: "application/octet-stream",
+ title: annotation.file_id,
+ filename: annotation.file_id,
+ providerMetadata: {
+ openai: {
+ fileId: annotation.file_id,
+ ...annotation.index != null ? { index: annotation.index } : {}
+ }
+ }
+ });
  }
  }
  }
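
Note: the minified branches added above map the two new annotation types onto "source" content parts. A rough, de-minified sketch of that mapping as a standalone function (the Annotation type and toSourcePart name are illustrative only, not exported by the package):

    // Sketch of the mapping performed by the new doGenerate branches above.
    type Annotation =
      | {
          type: "container_file_citation";
          container_id: string;
          file_id: string;
          filename?: string | null;
          index?: number | null;
        }
      | { type: "file_path"; file_id: string; index?: number | null };

    function toSourcePart(annotation: Annotation, generateId: () => string) {
      if (annotation.type === "container_file_citation") {
        return {
          type: "source",
          sourceType: "document",
          id: generateId(),
          mediaType: "text/plain",
          title: annotation.filename ?? annotation.file_id,
          filename: annotation.filename ?? annotation.file_id,
          providerMetadata: {
            openai: {
              fileId: annotation.file_id,
              containerId: annotation.container_id,
              ...(annotation.index != null ? { index: annotation.index } : {})
            }
          }
        };
      }
      // file_path annotations carry no filename, so the file id doubles as the title.
      return {
        type: "source",
        sourceType: "document",
        id: generateId(),
        mediaType: "application/octet-stream",
        title: annotation.file_id,
        filename: annotation.file_id,
        providerMetadata: {
          openai: {
            fileId: annotation.file_id,
            ...(annotation.index != null ? { index: annotation.index } : {})
          }
        }
      };
    }

    // Example: a code interpreter citation becomes a text/plain document source.
    console.log(toSourcePart(
      { type: "container_file_citation", container_id: "cntr_123", file_id: "cfile_456", filename: "out.csv" },
      () => "src_1"
    ));
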
@@ -3842,13 +3902,13 @@ var OpenAIResponsesLanguageModel = class {
  toolName: "file_search",
  result: {
  queries: part.queries,
- results: (_n = (_m = part.results) == null ? void 0 : _m.map((result) => ({
+ results: (_w = (_v = part.results) == null ? void 0 : _v.map((result) => ({
  attributes: result.attributes,
  fileId: result.file_id,
  filename: result.filename,
  score: result.score,
  text: result.text
- }))) != null ? _n : null
+ }))) != null ? _w : null
  }
  });
  break;
@@ -3888,15 +3948,15 @@ var OpenAIResponsesLanguageModel = class {
  return {
  content,
  finishReason: mapOpenAIResponseFinishReason({
- finishReason: (_o = response.incomplete_details) == null ? void 0 : _o.reason,
+ finishReason: (_x = response.incomplete_details) == null ? void 0 : _x.reason,
  hasFunctionCall
  }),
  usage: {
  inputTokens: response.usage.input_tokens,
  outputTokens: response.usage.output_tokens,
  totalTokens: response.usage.input_tokens + response.usage.output_tokens,
- reasoningTokens: (_q = (_p = response.usage.output_tokens_details) == null ? void 0 : _p.reasoning_tokens) != null ? _q : void 0,
- cachedInputTokens: (_s = (_r = response.usage.input_tokens_details) == null ? void 0 : _r.cached_tokens) != null ? _s : void 0
+ reasoningTokens: (_z = (_y = response.usage.output_tokens_details) == null ? void 0 : _y.reasoning_tokens) != null ? _z : void 0,
+ cachedInputTokens: (_B = (_A = response.usage.input_tokens_details) == null ? void 0 : _A.cached_tokens) != null ? _B : void 0
  },
  request: { body },
  response: {
@@ -3954,7 +4014,7 @@ var OpenAIResponsesLanguageModel = class {
  controller.enqueue({ type: "stream-start", warnings });
  },
  transform(chunk, controller) {
- var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p, _q, _r, _s, _t, _u, _v;
+ var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p, _q, _r, _s, _t, _u, _v, _w, _x, _y, _z, _A, _B, _C, _D, _E;
  if (options.includeRawChunks) {
  controller.enqueue({ type: "raw", rawValue: chunk.rawValue });
  }
@@ -4359,6 +4419,37 @@ var OpenAIResponsesLanguageModel = class {
  }
  } : {}
  });
+ } else if (value.annotation.type === "container_file_citation") {
+ controller.enqueue({
+ type: "source",
+ sourceType: "document",
+ id: (_y = (_x = (_w = self.config).generateId) == null ? void 0 : _x.call(_w)) != null ? _y : generateId2(),
+ mediaType: "text/plain",
+ title: (_A = (_z = value.annotation.filename) != null ? _z : value.annotation.file_id) != null ? _A : "Document",
+ filename: (_B = value.annotation.filename) != null ? _B : value.annotation.file_id,
+ providerMetadata: {
+ openai: {
+ fileId: value.annotation.file_id,
+ containerId: value.annotation.container_id,
+ ...value.annotation.index != null ? { index: value.annotation.index } : {}
+ }
+ }
+ });
+ } else if (value.annotation.type === "file_path") {
+ controller.enqueue({
+ type: "source",
+ sourceType: "document",
+ id: (_E = (_D = (_C = self.config).generateId) == null ? void 0 : _D.call(_C)) != null ? _E : generateId2(),
+ mediaType: "application/octet-stream",
+ title: value.annotation.file_id,
+ filename: value.annotation.file_id,
+ providerMetadata: {
+ openai: {
+ fileId: value.annotation.file_id,
+ ...value.annotation.index != null ? { index: value.annotation.index } : {}
+ }
+ }
+ });
  }
  } else if (isErrorChunk(value)) {
  controller.enqueue({ type: "error", error: value });