@ai-sdk/openai 2.0.13 → 2.0.14

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/CHANGELOG.md CHANGED
@@ -1,5 +1,11 @@
  # @ai-sdk/openai
 
+ ## 2.0.14
+
+ ### Patch Changes
+
+ - 7f47105: fix(provider/openai): support file_citation annotations in responses api
+
  ## 2.0.13
 
  ### Patch Changes
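The 2.0.14 patch widens the Responses API annotation schema from a single url_citation object to a discriminated union that also accepts file_citation, and maps file citations to document-type source parts. A minimal sketch of the two shapes and the mapping, using a plain zod import for readability (the bundled code below references it as import_v413); annotationSchema and toSourcePart are illustrative names, not SDK exports:

// Sketch of the annotation shapes this patch supports, based on the schema
// changes in dist/index.js below. Field names mirror the diff; this standalone
// schema is illustrative, not the SDK's exported API.
import { z } from "zod";

const annotationSchema = z.discriminatedUnion("type", [
  z.object({
    type: z.literal("url_citation"),
    start_index: z.number(),
    end_index: z.number(),
    url: z.string(),
    title: z.string(),
  }),
  z.object({
    type: z.literal("file_citation"),
    start_index: z.number(),
    end_index: z.number(),
    file_id: z.string(),
    quote: z.string(),
  }),
]);

type Annotation = z.infer<typeof annotationSchema>;

// Mapping mirrors the doGenerate and stream-transform changes below:
// url_citation -> "url" source; file_citation -> "document" source with the
// quote as title and the file_id as filename.
function toSourcePart(annotation: Annotation) {
  if (annotation.type === "url_citation") {
    return {
      type: "source" as const,
      sourceType: "url" as const,
      url: annotation.url,
      title: annotation.title,
    };
  }
  return {
    type: "source" as const,
    sourceType: "document" as const,
    mediaType: "text/plain",
    title: annotation.quote,
    filename: annotation.file_id,
  };
}

The same mapping is applied in both doGenerate and the streaming transform, as the dist/index.js diff below shows.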
package/dist/index.js CHANGED
@@ -2322,7 +2322,7 @@ var OpenAIResponsesLanguageModel = class {
  };
  }
  async doGenerate(options) {
- var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k;
+ var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n;
  const { args: body, warnings } = await this.getArgs(options);
  const url = this.config.url({
  path: "/responses",
@@ -2358,13 +2358,22 @@ var OpenAIResponsesLanguageModel = class {
  text: import_v413.z.string(),
  logprobs: LOGPROBS_SCHEMA.nullish(),
  annotations: import_v413.z.array(
- import_v413.z.object({
- type: import_v413.z.literal("url_citation"),
- start_index: import_v413.z.number(),
- end_index: import_v413.z.number(),
- url: import_v413.z.string(),
- title: import_v413.z.string()
- })
+ import_v413.z.discriminatedUnion("type", [
+ import_v413.z.object({
+ type: import_v413.z.literal("url_citation"),
+ start_index: import_v413.z.number(),
+ end_index: import_v413.z.number(),
+ url: import_v413.z.string(),
+ title: import_v413.z.string()
+ }),
+ import_v413.z.object({
+ type: import_v413.z.literal("file_citation"),
+ start_index: import_v413.z.number(),
+ end_index: import_v413.z.number(),
+ file_id: import_v413.z.string(),
+ quote: import_v413.z.string()
+ })
+ ])
  )
  })
  )
@@ -2470,13 +2479,24 @@ var OpenAIResponsesLanguageModel = class {
  }
  });
  for (const annotation of contentPart.annotations) {
- content.push({
- type: "source",
- sourceType: "url",
- id: (_f = (_e = (_d = this.config).generateId) == null ? void 0 : _e.call(_d)) != null ? _f : (0, import_provider_utils12.generateId)(),
- url: annotation.url,
- title: annotation.title
- });
+ if (annotation.type === "url_citation") {
+ content.push({
+ type: "source",
+ sourceType: "url",
+ id: (_f = (_e = (_d = this.config).generateId) == null ? void 0 : _e.call(_d)) != null ? _f : (0, import_provider_utils12.generateId)(),
+ url: annotation.url,
+ title: annotation.title
+ });
+ } else if (annotation.type === "file_citation") {
+ content.push({
+ type: "source",
+ sourceType: "document",
+ id: (_i = (_h = (_g = this.config).generateId) == null ? void 0 : _h.call(_g)) != null ? _i : (0, import_provider_utils12.generateId)(),
+ mediaType: "text/plain",
+ title: annotation.quote,
+ filename: annotation.file_id
+ });
+ }
  }
  }
  break;
@@ -2565,15 +2585,15 @@ var OpenAIResponsesLanguageModel = class {
  return {
  content,
  finishReason: mapOpenAIResponseFinishReason({
- finishReason: (_g = response.incomplete_details) == null ? void 0 : _g.reason,
+ finishReason: (_j = response.incomplete_details) == null ? void 0 : _j.reason,
  hasToolCalls: content.some((part) => part.type === "tool-call")
  }),
  usage: {
  inputTokens: response.usage.input_tokens,
  outputTokens: response.usage.output_tokens,
  totalTokens: response.usage.input_tokens + response.usage.output_tokens,
- reasoningTokens: (_i = (_h = response.usage.output_tokens_details) == null ? void 0 : _h.reasoning_tokens) != null ? _i : void 0,
- cachedInputTokens: (_k = (_j = response.usage.input_tokens_details) == null ? void 0 : _j.cached_tokens) != null ? _k : void 0
+ reasoningTokens: (_l = (_k = response.usage.output_tokens_details) == null ? void 0 : _k.reasoning_tokens) != null ? _l : void 0,
+ cachedInputTokens: (_n = (_m = response.usage.input_tokens_details) == null ? void 0 : _m.cached_tokens) != null ? _n : void 0
  },
  request: { body },
  response: {
@@ -2625,7 +2645,7 @@ var OpenAIResponsesLanguageModel = class {
  controller.enqueue({ type: "stream-start", warnings });
  },
  transform(chunk, controller) {
- var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m;
+ var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p;
  if (options.includeRawChunks) {
  controller.enqueue({ type: "raw", rawValue: chunk.rawValue });
  }
@@ -2880,13 +2900,24 @@ var OpenAIResponsesLanguageModel = class {
  usage.reasoningTokens = (_h = (_g = value.response.usage.output_tokens_details) == null ? void 0 : _g.reasoning_tokens) != null ? _h : void 0;
  usage.cachedInputTokens = (_j = (_i = value.response.usage.input_tokens_details) == null ? void 0 : _i.cached_tokens) != null ? _j : void 0;
  } else if (isResponseAnnotationAddedChunk(value)) {
- controller.enqueue({
- type: "source",
- sourceType: "url",
- id: (_m = (_l = (_k = self.config).generateId) == null ? void 0 : _l.call(_k)) != null ? _m : (0, import_provider_utils12.generateId)(),
- url: value.annotation.url,
- title: value.annotation.title
- });
+ if (value.annotation.type === "url_citation") {
+ controller.enqueue({
+ type: "source",
+ sourceType: "url",
+ id: (_m = (_l = (_k = self.config).generateId) == null ? void 0 : _l.call(_k)) != null ? _m : (0, import_provider_utils12.generateId)(),
+ url: value.annotation.url,
+ title: value.annotation.title
+ });
+ } else if (value.annotation.type === "file_citation") {
+ controller.enqueue({
+ type: "source",
+ sourceType: "document",
+ id: (_p = (_o = (_n = self.config).generateId) == null ? void 0 : _o.call(_n)) != null ? _p : (0, import_provider_utils12.generateId)(),
+ mediaType: "text/plain",
+ title: value.annotation.quote,
+ filename: value.annotation.file_id
+ });
+ }
  } else if (isErrorChunk(value)) {
  controller.enqueue({ type: "error", error: value });
  }
@@ -3053,11 +3084,18 @@ var responseFunctionCallArgumentsDeltaSchema = import_v413.z.object({
  });
  var responseAnnotationAddedSchema = import_v413.z.object({
  type: import_v413.z.literal("response.output_text.annotation.added"),
- annotation: import_v413.z.object({
- type: import_v413.z.literal("url_citation"),
- url: import_v413.z.string(),
- title: import_v413.z.string()
- })
+ annotation: import_v413.z.discriminatedUnion("type", [
+ import_v413.z.object({
+ type: import_v413.z.literal("url_citation"),
+ url: import_v413.z.string(),
+ title: import_v413.z.string()
+ }),
+ import_v413.z.object({
+ type: import_v413.z.literal("file_citation"),
+ file_id: import_v413.z.string(),
+ quote: import_v413.z.string()
+ })
+ ])
  });
  var responseReasoningSummaryPartAddedSchema = import_v413.z.object({
  type: import_v413.z.literal("response.reasoning_summary_part.added"),
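Downstream, file-search citations returned by the Responses API should now surface as document sources instead of failing schema validation. A rough consumer-side sketch, assuming AI SDK 5's generateText, the openai.responses() model factory, and a fileSearch provider tool wired to an existing vector store; the tool options and the "vs_123" id are placeholders, so check the provider docs for the exact signature:

// Rough consumer-side sketch, not part of the diff above.
import { generateText } from "ai";
import { openai } from "@ai-sdk/openai";

async function main() {
  const result = await generateText({
    model: openai.responses("gpt-4o-mini"),
    // Illustrative tool config; the exact fileSearch options may differ by version.
    tools: { file_search: openai.tools.fileSearch({ vectorStoreIds: ["vs_123"] }) },
    prompt: "What does the uploaded report say about Q3 revenue?",
  });

  for (const source of result.sources) {
    if (source.sourceType === "document") {
      // New in 2.0.14: file_citation annotations arrive as document sources,
      // with the file id as filename and the cited quote as title.
      console.log(source.filename, source.title);
    } else if (source.sourceType === "url") {
      console.log(source.url, source.title);
    }
  }
}

main().catch(console.error);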