@ai-sdk/openai 3.0.0-beta.49 → 3.0.0-beta.51

This diff shows the changes between publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.
package/CHANGELOG.md CHANGED
@@ -1,5 +1,19 @@
  # @ai-sdk/openai

+ ## 3.0.0-beta.51
+
+ ### Patch Changes
+
+ - b82987c: feat(openai): support openai code-interpreter annotations
+
+ ## 3.0.0-beta.50
+
+ ### Patch Changes
+
+ - Updated dependencies [bb36798]
+   - @ai-sdk/provider@3.0.0-beta.15
+   - @ai-sdk/provider-utils@4.0.0-beta.31
+
  ## 3.0.0-beta.49

  ### Patch Changes
package/dist/index.js CHANGED
@@ -2569,7 +2569,10 @@ var openaiResponsesChunkSchema = (0, import_provider_utils21.lazySchema)(
  queries: import_v416.z.array(import_v416.z.string()),
  results: import_v416.z.array(
  import_v416.z.object({
- attributes: import_v416.z.record(import_v416.z.string(), import_v416.z.unknown()),
+ attributes: import_v416.z.record(
+ import_v416.z.string(),
+ import_v416.z.union([import_v416.z.string(), import_v416.z.number(), import_v416.z.boolean()])
+ ),
  file_id: import_v416.z.string(),
  filename: import_v416.z.string(),
  score: import_v416.z.number(),
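
The hunk above narrows the `attributes` values on file-search results from `unknown` to `string | number | boolean`. A minimal sketch of the same shape in plain `zod` (using `z` in place of the bundled `import_v416` alias; only the fields visible in the diff are included):

```ts
import { z } from "zod";

// File-search result entry: attribute values are now constrained to
// string | number | boolean instead of unknown.
const fileSearchResultSchema = z.object({
  attributes: z.record(z.string(), z.union([z.string(), z.number(), z.boolean()])),
  file_id: z.string(),
  filename: z.string(),
  score: z.number(),
});
```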
@@ -2637,6 +2640,20 @@ var openaiResponsesChunkSchema = (0, import_provider_utils21.lazySchema)(
  start_index: import_v416.z.number().nullish(),
  end_index: import_v416.z.number().nullish(),
  quote: import_v416.z.string().nullish()
+ }),
+ import_v416.z.object({
+ type: import_v416.z.literal("container_file_citation"),
+ container_id: import_v416.z.string(),
+ file_id: import_v416.z.string(),
+ filename: import_v416.z.string().nullish(),
+ start_index: import_v416.z.number().nullish(),
+ end_index: import_v416.z.number().nullish(),
+ index: import_v416.z.number().nullish()
+ }),
+ import_v416.z.object({
+ type: import_v416.z.literal("file_path"),
+ file_id: import_v416.z.string(),
+ index: import_v416.z.number().nullish()
  })
  ])
  }),
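
The hunk above adds two new annotation variants to the streaming chunk schema. Written out with plain `zod` (a sketch mirroring only the fields shown in the diff, with `z` standing in for the bundled alias):

```ts
import { z } from "zod";

// Code-interpreter annotations added to the annotation union.
const containerFileCitationAnnotationSchema = z.object({
  type: z.literal("container_file_citation"),
  container_id: z.string(),
  file_id: z.string(),
  filename: z.string().nullish(),
  start_index: z.number().nullish(),
  end_index: z.number().nullish(),
  index: z.number().nullish(),
});

const filePathAnnotationSchema = z.object({
  type: z.literal("file_path"),
  file_id: z.string(),
  index: z.number().nullish(),
});
```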
@@ -2722,7 +2739,18 @@ var openaiResponsesResponseSchema = (0, import_provider_utils21.lazySchema)(
  quote: import_v416.z.string().nullish()
  }),
  import_v416.z.object({
- type: import_v416.z.literal("container_file_citation")
+ type: import_v416.z.literal("container_file_citation"),
+ container_id: import_v416.z.string(),
+ file_id: import_v416.z.string(),
+ filename: import_v416.z.string().nullish(),
+ start_index: import_v416.z.number().nullish(),
+ end_index: import_v416.z.number().nullish(),
+ index: import_v416.z.number().nullish()
+ }),
+ import_v416.z.object({
+ type: import_v416.z.literal("file_path"),
+ file_id: import_v416.z.string(),
+ index: import_v416.z.number().nullish()
  })
  ])
  )
@@ -2761,7 +2789,10 @@ var openaiResponsesResponseSchema = (0, import_provider_utils21.lazySchema)(
  queries: import_v416.z.array(import_v416.z.string()),
  results: import_v416.z.array(
  import_v416.z.object({
- attributes: import_v416.z.record(import_v416.z.string(), import_v416.z.unknown()),
+ attributes: import_v416.z.record(
+ import_v416.z.string(),
+ import_v416.z.union([import_v416.z.string(), import_v416.z.number(), import_v416.z.boolean()])
+ ),
  file_id: import_v416.z.string(),
  filename: import_v416.z.string(),
  score: import_v416.z.number(),
@@ -3301,7 +3332,7 @@ var OpenAIResponsesLanguageModel = class {
  };
  }
  async doGenerate(options) {
- var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p, _q, _r, _s;
+ var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p, _q, _r, _s, _t, _u, _v, _w, _x, _y, _z, _A, _B;
  const {
  args: body,
  warnings,
@@ -3399,13 +3430,17 @@ var OpenAIResponsesLanguageModel = class {
  if (((_c = (_b = options.providerOptions) == null ? void 0 : _b.openai) == null ? void 0 : _c.logprobs) && contentPart.logprobs) {
  logprobs.push(contentPart.logprobs);
  }
+ const providerMetadata2 = {
+ itemId: part.id,
+ ...contentPart.annotations.length > 0 && {
+ annotations: contentPart.annotations
+ }
+ };
  content.push({
  type: "text",
  text: contentPart.text,
  providerMetadata: {
- openai: {
- itemId: part.id
- }
+ openai: providerMetadata2
  }
  });
  for (const annotation of contentPart.annotations) {
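
In the hunk above, text parts now carry their annotations in provider metadata: `itemId` is always set, and `annotations` is added only when the content part has at least one. A de-minified sketch of that construction (names follow the compiled code):

```ts
// Sketch of the providerMetadata construction above.
function buildTextProviderMetadata(part: { id: string }, annotations: unknown[]) {
  return {
    openai: {
      itemId: part.id,
      // Conditional spread: the key is omitted entirely when there are no annotations.
      ...(annotations.length > 0 && { annotations }),
    },
  };
}
```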
@@ -3433,6 +3468,37 @@ var OpenAIResponsesLanguageModel = class {
  }
  } : {}
  });
+ } else if (annotation.type === "container_file_citation") {
+ content.push({
+ type: "source",
+ sourceType: "document",
+ id: (_o = (_n = (_m = this.config).generateId) == null ? void 0 : _n.call(_m)) != null ? _o : (0, import_provider_utils24.generateId)(),
+ mediaType: "text/plain",
+ title: (_q = (_p = annotation.filename) != null ? _p : annotation.file_id) != null ? _q : "Document",
+ filename: (_r = annotation.filename) != null ? _r : annotation.file_id,
+ providerMetadata: {
+ openai: {
+ fileId: annotation.file_id,
+ containerId: annotation.container_id,
+ ...annotation.index != null ? { index: annotation.index } : {}
+ }
+ }
+ });
+ } else if (annotation.type === "file_path") {
+ content.push({
+ type: "source",
+ sourceType: "document",
+ id: (_u = (_t = (_s = this.config).generateId) == null ? void 0 : _t.call(_s)) != null ? _u : (0, import_provider_utils24.generateId)(),
+ mediaType: "application/octet-stream",
+ title: annotation.file_id,
+ filename: annotation.file_id,
+ providerMetadata: {
+ openai: {
+ fileId: annotation.file_id,
+ ...annotation.index != null ? { index: annotation.index } : {}
+ }
+ }
+ });
  }
  }
  }
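
The branches added above turn the two new annotation types into `source` content parts. A de-minified, self-contained sketch of the same mapping (`annotationToSource` and `CodeInterpreterAnnotation` are illustrative names, not exports of the package; `generateId` is the provider-utils helper the compiled code falls back to):

```ts
import { generateId } from "@ai-sdk/provider-utils";

type CodeInterpreterAnnotation =
  | {
      type: "container_file_citation";
      container_id: string;
      file_id: string;
      filename?: string | null;
      index?: number | null;
    }
  | { type: "file_path"; file_id: string; index?: number | null };

// Map a code-interpreter annotation to a `source` content part,
// mirroring the two else-if branches in the diff above.
function annotationToSource(
  annotation: CodeInterpreterAnnotation,
  newId: () => string = generateId,
) {
  const base = {
    type: "source" as const,
    sourceType: "document" as const,
    id: newId(),
  };
  if (annotation.type === "container_file_citation") {
    return {
      ...base,
      mediaType: "text/plain",
      title: annotation.filename ?? annotation.file_id ?? "Document",
      filename: annotation.filename ?? annotation.file_id,
      providerMetadata: {
        openai: {
          fileId: annotation.file_id,
          containerId: annotation.container_id,
          ...(annotation.index != null ? { index: annotation.index } : {}),
        },
      },
    };
  }
  // file_path annotations carry no filename, so the file id doubles as title/filename.
  return {
    ...base,
    mediaType: "application/octet-stream",
    title: annotation.file_id,
    filename: annotation.file_id,
    providerMetadata: {
      openai: {
        fileId: annotation.file_id,
        ...(annotation.index != null ? { index: annotation.index } : {}),
      },
    },
  };
}
```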
@@ -3502,13 +3568,13 @@ var OpenAIResponsesLanguageModel = class {
  toolName: "file_search",
  result: {
  queries: part.queries,
- results: (_n = (_m = part.results) == null ? void 0 : _m.map((result) => ({
+ results: (_w = (_v = part.results) == null ? void 0 : _v.map((result) => ({
  attributes: result.attributes,
  fileId: result.file_id,
  filename: result.filename,
  score: result.score,
  text: result.text
- }))) != null ? _n : null
+ }))) != null ? _w : null
  }
  });
  break;
@@ -3548,15 +3614,15 @@ var OpenAIResponsesLanguageModel = class {
  return {
  content,
  finishReason: mapOpenAIResponseFinishReason({
- finishReason: (_o = response.incomplete_details) == null ? void 0 : _o.reason,
+ finishReason: (_x = response.incomplete_details) == null ? void 0 : _x.reason,
  hasFunctionCall
  }),
  usage: {
  inputTokens: response.usage.input_tokens,
  outputTokens: response.usage.output_tokens,
  totalTokens: response.usage.input_tokens + response.usage.output_tokens,
- reasoningTokens: (_q = (_p = response.usage.output_tokens_details) == null ? void 0 : _p.reasoning_tokens) != null ? _q : void 0,
- cachedInputTokens: (_s = (_r = response.usage.input_tokens_details) == null ? void 0 : _r.cached_tokens) != null ? _s : void 0
+ reasoningTokens: (_z = (_y = response.usage.output_tokens_details) == null ? void 0 : _y.reasoning_tokens) != null ? _z : void 0,
+ cachedInputTokens: (_B = (_A = response.usage.input_tokens_details) == null ? void 0 : _A.cached_tokens) != null ? _B : void 0
  },
  request: { body },
  response: {
@@ -3614,7 +3680,7 @@ var OpenAIResponsesLanguageModel = class {
  controller.enqueue({ type: "stream-start", warnings });
  },
  transform(chunk, controller) {
- var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p, _q, _r, _s, _t, _u, _v;
+ var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p, _q, _r, _s, _t, _u, _v, _w, _x, _y, _z, _A, _B, _C, _D, _E;
  if (options.includeRawChunks) {
  controller.enqueue({ type: "raw", rawValue: chunk.rawValue });
  }
@@ -4019,6 +4085,37 @@ var OpenAIResponsesLanguageModel = class {
  }
  } : {}
  });
+ } else if (value.annotation.type === "container_file_citation") {
+ controller.enqueue({
+ type: "source",
+ sourceType: "document",
+ id: (_y = (_x = (_w = self.config).generateId) == null ? void 0 : _x.call(_w)) != null ? _y : (0, import_provider_utils24.generateId)(),
+ mediaType: "text/plain",
+ title: (_A = (_z = value.annotation.filename) != null ? _z : value.annotation.file_id) != null ? _A : "Document",
+ filename: (_B = value.annotation.filename) != null ? _B : value.annotation.file_id,
+ providerMetadata: {
+ openai: {
+ fileId: value.annotation.file_id,
+ containerId: value.annotation.container_id,
+ ...value.annotation.index != null ? { index: value.annotation.index } : {}
+ }
+ }
+ });
+ } else if (value.annotation.type === "file_path") {
+ controller.enqueue({
+ type: "source",
+ sourceType: "document",
+ id: (_E = (_D = (_C = self.config).generateId) == null ? void 0 : _D.call(_C)) != null ? _E : (0, import_provider_utils24.generateId)(),
+ mediaType: "application/octet-stream",
+ title: value.annotation.file_id,
+ filename: value.annotation.file_id,
+ providerMetadata: {
+ openai: {
+ fileId: value.annotation.file_id,
+ ...value.annotation.index != null ? { index: value.annotation.index } : {}
+ }
+ }
+ });
  }
  } else if (isErrorChunk(value)) {
  controller.enqueue({ type: "error", error: value });
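
The streaming path mirrors `doGenerate`: container file citations and file paths are emitted as `source` stream parts whose `providerMetadata.openai` carries `fileId`, an optional `containerId`, and an optional `index`. A hypothetical usage sketch (the model id, prompt, and `code_interpreter` tool wiring are assumptions for illustration, not taken from this diff):

```ts
import { openai } from "@ai-sdk/openai";
import { streamText } from "ai";

const result = streamText({
  model: openai.responses("gpt-5"), // assumed Responses-capable model id
  tools: { code_interpreter: openai.tools.codeInterpreter({}) },
  prompt: "Write the first 10 primes to a CSV file and reference it.",
});

for await (const part of result.fullStream) {
  if (part.type === "source") {
    // After this change, code-interpreter file citations show up here.
    console.log(part.sourceType, part.providerMetadata?.openai);
  }
}
```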
@@ -4480,7 +4577,7 @@ var OpenAITranscriptionModel = class {
  };

  // src/version.ts
- var VERSION = true ? "3.0.0-beta.49" : "0.0.0-test";
+ var VERSION = true ? "3.0.0-beta.51" : "0.0.0-test";

  // src/openai-provider.ts
  function createOpenAI(options = {}) {