@ai-sdk/openai 3.0.0-beta.49 → 3.0.0-beta.51

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -2631,7 +2631,10 @@ var openaiResponsesChunkSchema = (0, import_provider_utils21.lazySchema)(
       queries: import_v414.z.array(import_v414.z.string()),
       results: import_v414.z.array(
         import_v414.z.object({
-          attributes: import_v414.z.record(import_v414.z.string(), import_v414.z.unknown()),
+          attributes: import_v414.z.record(
+            import_v414.z.string(),
+            import_v414.z.union([import_v414.z.string(), import_v414.z.number(), import_v414.z.boolean()])
+          ),
           file_id: import_v414.z.string(),
           filename: import_v414.z.string(),
           score: import_v414.z.number(),
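The hunk above narrows the `attributes` record on file-search results from arbitrary `unknown` values to strings, numbers, and booleans. A minimal standalone sketch of the new shape with plain `zod`; only the field names come from the hunk, the schema name and sample values are illustrative:

```ts
import { z } from "zod";

// Sketch of the narrowed file-search result shape: attribute values must now
// be string | number | boolean instead of unknown.
const fileSearchResultSchema = z.object({
  attributes: z.record(
    z.string(),
    z.union([z.string(), z.number(), z.boolean()])
  ),
  file_id: z.string(),
  filename: z.string(),
  score: z.number(),
});

// Passes: all attribute values are primitives.
fileSearchResultSchema.parse({
  attributes: { author: "Ada", page: 3, draft: false },
  file_id: "file_123",
  filename: "notes.txt",
  score: 0.87,
});

// No longer accepted after this change: a nested object value.
const result = fileSearchResultSchema.safeParse({
  attributes: { tags: { primary: "finance" } },
  file_id: "file_456",
  filename: "report.pdf",
  score: 0.42,
});
console.log(result.success); // false
```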
@@ -2699,6 +2702,20 @@ var openaiResponsesChunkSchema = (0, import_provider_utils21.lazySchema)(
           start_index: import_v414.z.number().nullish(),
           end_index: import_v414.z.number().nullish(),
           quote: import_v414.z.string().nullish()
+        }),
+        import_v414.z.object({
+          type: import_v414.z.literal("container_file_citation"),
+          container_id: import_v414.z.string(),
+          file_id: import_v414.z.string(),
+          filename: import_v414.z.string().nullish(),
+          start_index: import_v414.z.number().nullish(),
+          end_index: import_v414.z.number().nullish(),
+          index: import_v414.z.number().nullish()
+        }),
+        import_v414.z.object({
+          type: import_v414.z.literal("file_path"),
+          file_id: import_v414.z.string(),
+          index: import_v414.z.number().nullish()
         })
       ])
     }),
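The annotation union in the streaming chunk schema gains two members, `container_file_citation` and `file_path`. A rough standalone sketch of the two added shapes with plain `zod`; only the field names come from the hunk, the union name and sample payload (with placeholder ids) are illustrative:

```ts
import { z } from "zod";

// Illustrative sketch of the two annotation variants added to the union.
const containerFileCitationAnnotation = z.object({
  type: z.literal("container_file_citation"),
  container_id: z.string(),
  file_id: z.string(),
  filename: z.string().nullish(),
  start_index: z.number().nullish(),
  end_index: z.number().nullish(),
  index: z.number().nullish(),
});

const filePathAnnotation = z.object({
  type: z.literal("file_path"),
  file_id: z.string(),
  index: z.number().nullish(),
});

const newAnnotation = z.union([
  containerFileCitationAnnotation,
  filePathAnnotation,
]);

// Example payload (placeholder ids) that the chunk schema now accepts:
newAnnotation.parse({
  type: "container_file_citation",
  container_id: "cntr_123",
  file_id: "cfile_456",
  filename: "output.csv",
  start_index: 10,
  end_index: 52,
});
```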
@@ -2784,7 +2801,18 @@ var openaiResponsesResponseSchema = (0, import_provider_utils21.lazySchema)(
           quote: import_v414.z.string().nullish()
         }),
         import_v414.z.object({
-          type: import_v414.z.literal("container_file_citation")
+          type: import_v414.z.literal("container_file_citation"),
+          container_id: import_v414.z.string(),
+          file_id: import_v414.z.string(),
+          filename: import_v414.z.string().nullish(),
+          start_index: import_v414.z.number().nullish(),
+          end_index: import_v414.z.number().nullish(),
+          index: import_v414.z.number().nullish()
+        }),
+        import_v414.z.object({
+          type: import_v414.z.literal("file_path"),
+          file_id: import_v414.z.string(),
+          index: import_v414.z.number().nullish()
         })
       ])
     )
@@ -2823,7 +2851,10 @@ var openaiResponsesResponseSchema = (0, import_provider_utils21.lazySchema)(
       queries: import_v414.z.array(import_v414.z.string()),
       results: import_v414.z.array(
         import_v414.z.object({
-          attributes: import_v414.z.record(import_v414.z.string(), import_v414.z.unknown()),
+          attributes: import_v414.z.record(
+            import_v414.z.string(),
+            import_v414.z.union([import_v414.z.string(), import_v414.z.number(), import_v414.z.boolean()])
+          ),
           file_id: import_v414.z.string(),
           filename: import_v414.z.string(),
           score: import_v414.z.number(),
@@ -3597,7 +3628,7 @@ var OpenAIResponsesLanguageModel = class {
     };
   }
   async doGenerate(options) {
-    var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p, _q, _r, _s;
+    var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p, _q, _r, _s, _t, _u, _v, _w, _x, _y, _z, _A, _B;
     const {
       args: body,
       warnings,
@@ -3695,13 +3726,17 @@ var OpenAIResponsesLanguageModel = class {
           if (((_c = (_b = options.providerOptions) == null ? void 0 : _b.openai) == null ? void 0 : _c.logprobs) && contentPart.logprobs) {
             logprobs.push(contentPart.logprobs);
           }
+          const providerMetadata2 = {
+            itemId: part.id,
+            ...contentPart.annotations.length > 0 && {
+              annotations: contentPart.annotations
+            }
+          };
           content.push({
             type: "text",
             text: contentPart.text,
             providerMetadata: {
-              openai: {
-                itemId: part.id
-              }
+              openai: providerMetadata2
             }
           });
           for (const annotation of contentPart.annotations) {
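With this hunk, `doGenerate` attaches the raw `annotations` array to a text part's `openai` provider metadata whenever it is non-empty, next to the existing `itemId`. A hedged sketch of how a consumer might read that metadata; the types and helper below are hypothetical, only the `itemId`/`annotations` keys come from the change:

```ts
// Hypothetical helper: the surrounding types are illustrative; only the
// `itemId` and `annotations` metadata keys are taken from the diff above.
type OpenAITextMetadata = {
  itemId: string;
  annotations?: Array<{ type: string } & Record<string, unknown>>;
};

type TextPart = {
  type: "text";
  text: string;
  providerMetadata?: { openai?: OpenAITextMetadata };
};

// Returns the annotation type tags attached to a generated text part, if any.
function annotationTypes(part: TextPart): string[] {
  const annotations = part.providerMetadata?.openai?.annotations ?? [];
  return annotations.map((annotation) => annotation.type);
}
```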
@@ -3729,6 +3764,37 @@ var OpenAIResponsesLanguageModel = class {
                 }
               } : {}
             });
+          } else if (annotation.type === "container_file_citation") {
+            content.push({
+              type: "source",
+              sourceType: "document",
+              id: (_o = (_n = (_m = this.config).generateId) == null ? void 0 : _n.call(_m)) != null ? _o : (0, import_provider_utils29.generateId)(),
+              mediaType: "text/plain",
+              title: (_q = (_p = annotation.filename) != null ? _p : annotation.file_id) != null ? _q : "Document",
+              filename: (_r = annotation.filename) != null ? _r : annotation.file_id,
+              providerMetadata: {
+                openai: {
+                  fileId: annotation.file_id,
+                  containerId: annotation.container_id,
+                  ...annotation.index != null ? { index: annotation.index } : {}
+                }
+              }
+            });
+          } else if (annotation.type === "file_path") {
+            content.push({
+              type: "source",
+              sourceType: "document",
+              id: (_u = (_t = (_s = this.config).generateId) == null ? void 0 : _t.call(_s)) != null ? _u : (0, import_provider_utils29.generateId)(),
+              mediaType: "application/octet-stream",
+              title: annotation.file_id,
+              filename: annotation.file_id,
+              providerMetadata: {
+                openai: {
+                  fileId: annotation.file_id,
+                  ...annotation.index != null ? { index: annotation.index } : {}
+                }
+              }
+            });
           }
         }
       }
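Beyond the metadata, `container_file_citation` and `file_path` annotations are now also surfaced as document sources (`type: "source"`, `sourceType: "document"`), with `text/plain` and `application/octet-stream` media types respectively and the file/container ids under `providerMetadata.openai`. A hedged sketch of filtering such parts out of a generated content array; the types are illustrative and only mirror the fields visible in the hunk:

```ts
// Illustrative shape of the document-source parts produced above; not an
// official type exported by the package.
type DocumentSourcePart = {
  type: "source";
  sourceType: "document";
  id: string;
  mediaType: string;
  title: string;
  filename?: string;
  providerMetadata?: {
    openai?: { fileId?: string; containerId?: string; index?: number };
  };
};

type ContentPart = { type: string } & Partial<Omit<DocumentSourcePart, "type">>;

// Collect document sources that point at container files, i.e. those whose
// provider metadata carries a containerId.
function containerFileSources(content: ContentPart[]): DocumentSourcePart[] {
  return content.filter(
    (part): part is DocumentSourcePart =>
      part.type === "source" &&
      part.sourceType === "document" &&
      part.providerMetadata?.openai?.containerId != null
  );
}
```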
@@ -3798,13 +3864,13 @@ var OpenAIResponsesLanguageModel = class {
           toolName: "file_search",
           result: {
             queries: part.queries,
-            results: (_n = (_m = part.results) == null ? void 0 : _m.map((result) => ({
+            results: (_w = (_v = part.results) == null ? void 0 : _v.map((result) => ({
               attributes: result.attributes,
               fileId: result.file_id,
               filename: result.filename,
               score: result.score,
               text: result.text
-            }))) != null ? _n : null
+            }))) != null ? _w : null
           }
         });
         break;
@@ -3844,15 +3910,15 @@ var OpenAIResponsesLanguageModel = class {
     return {
       content,
       finishReason: mapOpenAIResponseFinishReason({
-        finishReason: (_o = response.incomplete_details) == null ? void 0 : _o.reason,
+        finishReason: (_x = response.incomplete_details) == null ? void 0 : _x.reason,
         hasFunctionCall
       }),
       usage: {
         inputTokens: response.usage.input_tokens,
         outputTokens: response.usage.output_tokens,
         totalTokens: response.usage.input_tokens + response.usage.output_tokens,
-        reasoningTokens: (_q = (_p = response.usage.output_tokens_details) == null ? void 0 : _p.reasoning_tokens) != null ? _q : void 0,
-        cachedInputTokens: (_s = (_r = response.usage.input_tokens_details) == null ? void 0 : _r.cached_tokens) != null ? _s : void 0
+        reasoningTokens: (_z = (_y = response.usage.output_tokens_details) == null ? void 0 : _y.reasoning_tokens) != null ? _z : void 0,
+        cachedInputTokens: (_B = (_A = response.usage.input_tokens_details) == null ? void 0 : _A.cached_tokens) != null ? _B : void 0
       },
       request: { body },
       response: {
@@ -3910,7 +3976,7 @@ var OpenAIResponsesLanguageModel = class {
            controller.enqueue({ type: "stream-start", warnings });
          },
          transform(chunk, controller) {
-            var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p, _q, _r, _s, _t, _u, _v;
+            var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p, _q, _r, _s, _t, _u, _v, _w, _x, _y, _z, _A, _B, _C, _D, _E;
            if (options.includeRawChunks) {
              controller.enqueue({ type: "raw", rawValue: chunk.rawValue });
            }
@@ -4315,6 +4381,37 @@ var OpenAIResponsesLanguageModel = class {
                  }
                } : {}
              });
+            } else if (value.annotation.type === "container_file_citation") {
+              controller.enqueue({
+                type: "source",
+                sourceType: "document",
+                id: (_y = (_x = (_w = self.config).generateId) == null ? void 0 : _x.call(_w)) != null ? _y : (0, import_provider_utils29.generateId)(),
+                mediaType: "text/plain",
+                title: (_A = (_z = value.annotation.filename) != null ? _z : value.annotation.file_id) != null ? _A : "Document",
+                filename: (_B = value.annotation.filename) != null ? _B : value.annotation.file_id,
+                providerMetadata: {
+                  openai: {
+                    fileId: value.annotation.file_id,
+                    containerId: value.annotation.container_id,
+                    ...value.annotation.index != null ? { index: value.annotation.index } : {}
+                  }
+                }
+              });
+            } else if (value.annotation.type === "file_path") {
+              controller.enqueue({
+                type: "source",
+                sourceType: "document",
+                id: (_E = (_D = (_C = self.config).generateId) == null ? void 0 : _D.call(_C)) != null ? _E : (0, import_provider_utils29.generateId)(),
+                mediaType: "application/octet-stream",
+                title: value.annotation.file_id,
+                filename: value.annotation.file_id,
+                providerMetadata: {
+                  openai: {
+                    fileId: value.annotation.file_id,
+                    ...value.annotation.index != null ? { index: value.annotation.index } : {}
+                  }
+                }
+              });
             }
           } else if (isErrorChunk(value)) {
             controller.enqueue({ type: "error", error: value });
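The streaming path mirrors `doGenerate`: the same two annotation types are enqueued as document source parts while streaming. A hedged usage sketch, assuming the AI SDK's `streamText`/`fullStream` surface; the model id and prompt are placeholders, not part of this diff:

```ts
import { openai } from "@ai-sdk/openai";
import { streamText } from "ai";

// Collect document sources (e.g. container file citations) emitted during
// streaming. Model id and prompt are placeholders; error handling omitted.
async function collectDocumentSources() {
  const result = streamText({
    model: openai.responses("gpt-5"),
    prompt: "Summarize the attached files and cite them.",
  });

  const documents: Array<{ title?: string; filename?: string }> = [];
  for await (const part of result.fullStream) {
    if (part.type === "source" && part.sourceType === "document") {
      documents.push({ title: part.title, filename: part.filename });
    }
  }
  return documents;
}
```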