@ai-sdk/openai 3.0.0-beta.49 → 3.0.0-beta.51

package/dist/index.mjs CHANGED
@@ -2620,7 +2620,10 @@ var openaiResponsesChunkSchema = lazySchema14(
   queries: z16.array(z16.string()),
   results: z16.array(
     z16.object({
-      attributes: z16.record(z16.string(), z16.unknown()),
+      attributes: z16.record(
+        z16.string(),
+        z16.union([z16.string(), z16.number(), z16.boolean()])
+      ),
       file_id: z16.string(),
       filename: z16.string(),
       score: z16.number(),
@@ -2688,6 +2691,20 @@ var openaiResponsesChunkSchema = lazySchema14(
       start_index: z16.number().nullish(),
       end_index: z16.number().nullish(),
       quote: z16.string().nullish()
+    }),
+    z16.object({
+      type: z16.literal("container_file_citation"),
+      container_id: z16.string(),
+      file_id: z16.string(),
+      filename: z16.string().nullish(),
+      start_index: z16.number().nullish(),
+      end_index: z16.number().nullish(),
+      index: z16.number().nullish()
+    }),
+    z16.object({
+      type: z16.literal("file_path"),
+      file_id: z16.string(),
+      index: z16.number().nullish()
     })
   ])
 }),
@@ -2773,7 +2790,18 @@ var openaiResponsesResponseSchema = lazySchema14(
       quote: z16.string().nullish()
     }),
     z16.object({
-      type: z16.literal("container_file_citation")
+      type: z16.literal("container_file_citation"),
+      container_id: z16.string(),
+      file_id: z16.string(),
+      filename: z16.string().nullish(),
+      start_index: z16.number().nullish(),
+      end_index: z16.number().nullish(),
+      index: z16.number().nullish()
+    }),
+    z16.object({
+      type: z16.literal("file_path"),
+      file_id: z16.string(),
+      index: z16.number().nullish()
     })
   ])
 )
@@ -2812,7 +2840,10 @@ var openaiResponsesResponseSchema = lazySchema14(
   queries: z16.array(z16.string()),
   results: z16.array(
     z16.object({
-      attributes: z16.record(z16.string(), z16.unknown()),
+      attributes: z16.record(
+        z16.string(),
+        z16.union([z16.string(), z16.number(), z16.boolean()])
+      ),
       file_id: z16.string(),
       filename: z16.string(),
       score: z16.number(),
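
Taken together, the four schema hunks above do two things: the file_search result "attributes" record is narrowed from unknown values to string/number/boolean, and the annotation union gains "container_file_citation" and "file_path" variants in both the chunk and response schemas. A standalone sketch of the same shapes in plain zod (illustrative names; the bundle itself uses the z16 alias inside lazySchema wrappers):

import { z } from "zod";

// file_search result: attribute values are now constrained to JSON primitives.
const fileSearchResult = z.object({
  attributes: z.record(
    z.string(),
    z.union([z.string(), z.number(), z.boolean()])
  ),
  file_id: z.string(),
  filename: z.string(),
  score: z.number()
});

// Newly accepted annotation variants.
const containerFileCitation = z.object({
  type: z.literal("container_file_citation"),
  container_id: z.string(),
  file_id: z.string(),
  filename: z.string().nullish(),
  start_index: z.number().nullish(),
  end_index: z.number().nullish(),
  index: z.number().nullish()
});

const filePath = z.object({
  type: z.literal("file_path"),
  file_id: z.string(),
  index: z.number().nullish()
});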
@@ -3354,7 +3385,7 @@ var OpenAIResponsesLanguageModel = class {
     };
   }
   async doGenerate(options) {
-    var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p, _q, _r, _s;
+    var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p, _q, _r, _s, _t, _u, _v, _w, _x, _y, _z, _A, _B;
     const {
       args: body,
       warnings,
@@ -3452,13 +3483,17 @@ var OpenAIResponsesLanguageModel = class {
         if (((_c = (_b = options.providerOptions) == null ? void 0 : _b.openai) == null ? void 0 : _c.logprobs) && contentPart.logprobs) {
           logprobs.push(contentPart.logprobs);
         }
+        const providerMetadata2 = {
+          itemId: part.id,
+          ...contentPart.annotations.length > 0 && {
+            annotations: contentPart.annotations
+          }
+        };
         content.push({
           type: "text",
           text: contentPart.text,
           providerMetadata: {
-            openai: {
-              itemId: part.id
-            }
+            openai: providerMetadata2
           }
         });
         for (const annotation of contentPart.annotations) {
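
With the providerMetadata2 change above, text parts returned from doGenerate keep itemId as before and additionally expose the raw OpenAI annotations under providerMetadata.openai.annotations whenever the content part has any. A minimal read-side sketch, assuming the AI SDK 5-style generateText result this beta line targets (the content and providerMetadata accessors below are that assumption, not something shown in this diff):

import { generateText } from "ai";
import { openai } from "@ai-sdk/openai";

const result = await generateText({
  model: openai.responses("gpt-4.1-mini"),
  prompt: "Answer and cite your sources.",
});

for (const part of result.content) {
  if (part.type === "text") {
    // itemId is always set; annotations only appear when the response part had any.
    const meta = part.providerMetadata?.openai;
    console.log(meta?.itemId, meta?.annotations);
  }
}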
@@ -3486,6 +3521,37 @@ var OpenAIResponsesLanguageModel = class {
                 }
               } : {}
             });
+          } else if (annotation.type === "container_file_citation") {
+            content.push({
+              type: "source",
+              sourceType: "document",
+              id: (_o = (_n = (_m = this.config).generateId) == null ? void 0 : _n.call(_m)) != null ? _o : generateId2(),
+              mediaType: "text/plain",
+              title: (_q = (_p = annotation.filename) != null ? _p : annotation.file_id) != null ? _q : "Document",
+              filename: (_r = annotation.filename) != null ? _r : annotation.file_id,
+              providerMetadata: {
+                openai: {
+                  fileId: annotation.file_id,
+                  containerId: annotation.container_id,
+                  ...annotation.index != null ? { index: annotation.index } : {}
+                }
+              }
+            });
+          } else if (annotation.type === "file_path") {
+            content.push({
+              type: "source",
+              sourceType: "document",
+              id: (_u = (_t = (_s = this.config).generateId) == null ? void 0 : _t.call(_s)) != null ? _u : generateId2(),
+              mediaType: "application/octet-stream",
+              title: annotation.file_id,
+              filename: annotation.file_id,
+              providerMetadata: {
+                openai: {
+                  fileId: annotation.file_id,
+                  ...annotation.index != null ? { index: annotation.index } : {}
+                }
+              }
+            });
           }
         }
       }
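
In the non-streaming path, both new annotation types are now mapped to document sources: container_file_citation keeps the container id and uses a text/plain media type, while file_path falls back to the file id and application/octet-stream. A minimal sketch of reading them back, assuming the generateText result's sources accessor; the fileId, containerId, and index metadata keys come from the hunk above:

import { generateText } from "ai";
import { openai } from "@ai-sdk/openai";

const { sources } = await generateText({
  model: openai.responses("gpt-4.1-mini"),
  prompt: "Write your answer to a file and cite it.",
});

for (const source of sources) {
  if (source.sourceType === "document") {
    const meta = source.providerMetadata?.openai;
    console.log(source.title, source.mediaType, meta?.fileId, meta?.containerId);
  }
}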
@@ -3555,13 +3621,13 @@ var OpenAIResponsesLanguageModel = class {
             toolName: "file_search",
             result: {
               queries: part.queries,
-              results: (_n = (_m = part.results) == null ? void 0 : _m.map((result) => ({
+              results: (_w = (_v = part.results) == null ? void 0 : _v.map((result) => ({
                 attributes: result.attributes,
                 fileId: result.file_id,
                 filename: result.filename,
                 score: result.score,
                 text: result.text
-              }))) != null ? _n : null
+              }))) != null ? _w : null
             }
           });
           break;
@@ -3601,15 +3667,15 @@ var OpenAIResponsesLanguageModel = class {
     return {
       content,
       finishReason: mapOpenAIResponseFinishReason({
-        finishReason: (_o = response.incomplete_details) == null ? void 0 : _o.reason,
+        finishReason: (_x = response.incomplete_details) == null ? void 0 : _x.reason,
         hasFunctionCall
       }),
       usage: {
         inputTokens: response.usage.input_tokens,
         outputTokens: response.usage.output_tokens,
         totalTokens: response.usage.input_tokens + response.usage.output_tokens,
-        reasoningTokens: (_q = (_p = response.usage.output_tokens_details) == null ? void 0 : _p.reasoning_tokens) != null ? _q : void 0,
-        cachedInputTokens: (_s = (_r = response.usage.input_tokens_details) == null ? void 0 : _r.cached_tokens) != null ? _s : void 0
+        reasoningTokens: (_z = (_y = response.usage.output_tokens_details) == null ? void 0 : _y.reasoning_tokens) != null ? _z : void 0,
+        cachedInputTokens: (_B = (_A = response.usage.input_tokens_details) == null ? void 0 : _A.cached_tokens) != null ? _B : void 0
       },
       request: { body },
       response: {
@@ -3667,7 +3733,7 @@ var OpenAIResponsesLanguageModel = class {
         controller.enqueue({ type: "stream-start", warnings });
       },
       transform(chunk, controller) {
-        var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p, _q, _r, _s, _t, _u, _v;
+        var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p, _q, _r, _s, _t, _u, _v, _w, _x, _y, _z, _A, _B, _C, _D, _E;
         if (options.includeRawChunks) {
           controller.enqueue({ type: "raw", rawValue: chunk.rawValue });
         }
@@ -4072,6 +4138,37 @@ var OpenAIResponsesLanguageModel = class {
                 }
               } : {}
             });
+          } else if (value.annotation.type === "container_file_citation") {
+            controller.enqueue({
+              type: "source",
+              sourceType: "document",
+              id: (_y = (_x = (_w = self.config).generateId) == null ? void 0 : _x.call(_w)) != null ? _y : generateId2(),
+              mediaType: "text/plain",
+              title: (_A = (_z = value.annotation.filename) != null ? _z : value.annotation.file_id) != null ? _A : "Document",
+              filename: (_B = value.annotation.filename) != null ? _B : value.annotation.file_id,
+              providerMetadata: {
+                openai: {
+                  fileId: value.annotation.file_id,
+                  containerId: value.annotation.container_id,
+                  ...value.annotation.index != null ? { index: value.annotation.index } : {}
+                }
+              }
+            });
+          } else if (value.annotation.type === "file_path") {
+            controller.enqueue({
+              type: "source",
+              sourceType: "document",
+              id: (_E = (_D = (_C = self.config).generateId) == null ? void 0 : _D.call(_C)) != null ? _E : generateId2(),
+              mediaType: "application/octet-stream",
+              title: value.annotation.file_id,
+              filename: value.annotation.file_id,
+              providerMetadata: {
+                openai: {
+                  fileId: value.annotation.file_id,
+                  ...value.annotation.index != null ? { index: value.annotation.index } : {}
+                }
+              }
+            });
           }
         } else if (isErrorChunk(value)) {
           controller.enqueue({ type: "error", error: value });
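
The streaming path mirrors the doGenerate change: when a streamed output-text annotation is a container_file_citation or file_path, a document source part is enqueued mid-stream with the same metadata keys. A sketch of observing those parts, assuming streamText's fullStream forwards source parts in the flattened shape the provider enqueues here:

import { streamText } from "ai";
import { openai } from "@ai-sdk/openai";

const result = streamText({
  model: openai.responses("gpt-4.1-mini"),
  prompt: "Cite any files you reference.",
});

for await (const part of result.fullStream) {
  if (part.type === "source" && part.sourceType === "document") {
    // fileId, containerId, and index sit under providerMetadata.openai (see hunk above).
    console.log(part.title, part.providerMetadata?.openai);
  }
}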
@@ -4545,7 +4642,7 @@ var OpenAITranscriptionModel = class {
 };

 // src/version.ts
-var VERSION = true ? "3.0.0-beta.49" : "0.0.0-test";
+var VERSION = true ? "3.0.0-beta.51" : "0.0.0-test";

 // src/openai-provider.ts
 function createOpenAI(options = {}) {