@ai-sdk/openai 2.0.13 → 2.0.14

This diff represents the changes between publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects the package versions as they appear in their respective public registries.
@@ -2641,7 +2641,7 @@ var OpenAIResponsesLanguageModel = class {
2641
2641
  };
2642
2642
  }
2643
2643
  async doGenerate(options) {
2644
- var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k;
2644
+ var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n;
2645
2645
  const { args: body, warnings } = await this.getArgs(options);
2646
2646
  const url = this.config.url({
2647
2647
  path: "/responses",
@@ -2677,13 +2677,22 @@ var OpenAIResponsesLanguageModel = class {
2677
2677
  text: import_v416.z.string(),
2678
2678
  logprobs: LOGPROBS_SCHEMA.nullish(),
2679
2679
  annotations: import_v416.z.array(
2680
- import_v416.z.object({
2681
- type: import_v416.z.literal("url_citation"),
2682
- start_index: import_v416.z.number(),
2683
- end_index: import_v416.z.number(),
2684
- url: import_v416.z.string(),
2685
- title: import_v416.z.string()
2686
- })
2680
+ import_v416.z.discriminatedUnion("type", [
2681
+ import_v416.z.object({
2682
+ type: import_v416.z.literal("url_citation"),
2683
+ start_index: import_v416.z.number(),
2684
+ end_index: import_v416.z.number(),
2685
+ url: import_v416.z.string(),
2686
+ title: import_v416.z.string()
2687
+ }),
2688
+ import_v416.z.object({
2689
+ type: import_v416.z.literal("file_citation"),
2690
+ start_index: import_v416.z.number(),
2691
+ end_index: import_v416.z.number(),
2692
+ file_id: import_v416.z.string(),
2693
+ quote: import_v416.z.string()
2694
+ })
2695
+ ])
2687
2696
  )
2688
2697
  })
2689
2698
  )
@@ -2789,13 +2798,24 @@ var OpenAIResponsesLanguageModel = class {
2789
2798
  }
2790
2799
  });
2791
2800
  for (const annotation of contentPart.annotations) {
2792
- content.push({
2793
- type: "source",
2794
- sourceType: "url",
2795
- id: (_f = (_e = (_d = this.config).generateId) == null ? void 0 : _e.call(_d)) != null ? _f : (0, import_provider_utils14.generateId)(),
2796
- url: annotation.url,
2797
- title: annotation.title
2798
- });
2801
+ if (annotation.type === "url_citation") {
2802
+ content.push({
2803
+ type: "source",
2804
+ sourceType: "url",
2805
+ id: (_f = (_e = (_d = this.config).generateId) == null ? void 0 : _e.call(_d)) != null ? _f : (0, import_provider_utils14.generateId)(),
2806
+ url: annotation.url,
2807
+ title: annotation.title
2808
+ });
2809
+ } else if (annotation.type === "file_citation") {
2810
+ content.push({
2811
+ type: "source",
2812
+ sourceType: "document",
2813
+ id: (_i = (_h = (_g = this.config).generateId) == null ? void 0 : _h.call(_g)) != null ? _i : (0, import_provider_utils14.generateId)(),
2814
+ mediaType: "text/plain",
2815
+ title: annotation.quote,
2816
+ filename: annotation.file_id
2817
+ });
2818
+ }
2799
2819
  }
2800
2820
  }
2801
2821
  break;
@@ -2884,15 +2904,15 @@ var OpenAIResponsesLanguageModel = class {
2884
2904
  return {
2885
2905
  content,
2886
2906
  finishReason: mapOpenAIResponseFinishReason({
2887
- finishReason: (_g = response.incomplete_details) == null ? void 0 : _g.reason,
2907
+ finishReason: (_j = response.incomplete_details) == null ? void 0 : _j.reason,
2888
2908
  hasToolCalls: content.some((part) => part.type === "tool-call")
2889
2909
  }),
2890
2910
  usage: {
2891
2911
  inputTokens: response.usage.input_tokens,
2892
2912
  outputTokens: response.usage.output_tokens,
2893
2913
  totalTokens: response.usage.input_tokens + response.usage.output_tokens,
2894
- reasoningTokens: (_i = (_h = response.usage.output_tokens_details) == null ? void 0 : _h.reasoning_tokens) != null ? _i : void 0,
2895
- cachedInputTokens: (_k = (_j = response.usage.input_tokens_details) == null ? void 0 : _j.cached_tokens) != null ? _k : void 0
2914
+ reasoningTokens: (_l = (_k = response.usage.output_tokens_details) == null ? void 0 : _k.reasoning_tokens) != null ? _l : void 0,
2915
+ cachedInputTokens: (_n = (_m = response.usage.input_tokens_details) == null ? void 0 : _m.cached_tokens) != null ? _n : void 0
2896
2916
  },
2897
2917
  request: { body },
2898
2918
  response: {
@@ -2944,7 +2964,7 @@ var OpenAIResponsesLanguageModel = class {
2944
2964
  controller.enqueue({ type: "stream-start", warnings });
2945
2965
  },
2946
2966
  transform(chunk, controller) {
2947
- var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m;
2967
+ var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p;
2948
2968
  if (options.includeRawChunks) {
2949
2969
  controller.enqueue({ type: "raw", rawValue: chunk.rawValue });
2950
2970
  }
@@ -3199,13 +3219,24 @@ var OpenAIResponsesLanguageModel = class {
3199
3219
  usage.reasoningTokens = (_h = (_g = value.response.usage.output_tokens_details) == null ? void 0 : _g.reasoning_tokens) != null ? _h : void 0;
3200
3220
  usage.cachedInputTokens = (_j = (_i = value.response.usage.input_tokens_details) == null ? void 0 : _i.cached_tokens) != null ? _j : void 0;
3201
3221
  } else if (isResponseAnnotationAddedChunk(value)) {
3202
- controller.enqueue({
3203
- type: "source",
3204
- sourceType: "url",
3205
- id: (_m = (_l = (_k = self.config).generateId) == null ? void 0 : _l.call(_k)) != null ? _m : (0, import_provider_utils14.generateId)(),
3206
- url: value.annotation.url,
3207
- title: value.annotation.title
3208
- });
3222
+ if (value.annotation.type === "url_citation") {
3223
+ controller.enqueue({
3224
+ type: "source",
3225
+ sourceType: "url",
3226
+ id: (_m = (_l = (_k = self.config).generateId) == null ? void 0 : _l.call(_k)) != null ? _m : (0, import_provider_utils14.generateId)(),
3227
+ url: value.annotation.url,
3228
+ title: value.annotation.title
3229
+ });
3230
+ } else if (value.annotation.type === "file_citation") {
3231
+ controller.enqueue({
3232
+ type: "source",
3233
+ sourceType: "document",
3234
+ id: (_p = (_o = (_n = self.config).generateId) == null ? void 0 : _o.call(_n)) != null ? _p : (0, import_provider_utils14.generateId)(),
3235
+ mediaType: "text/plain",
3236
+ title: value.annotation.quote,
3237
+ filename: value.annotation.file_id
3238
+ });
3239
+ }
3209
3240
  } else if (isErrorChunk(value)) {
3210
3241
  controller.enqueue({ type: "error", error: value });
3211
3242
  }
@@ -3372,11 +3403,18 @@ var responseFunctionCallArgumentsDeltaSchema = import_v416.z.object({
3372
3403
  });
3373
3404
  var responseAnnotationAddedSchema = import_v416.z.object({
3374
3405
  type: import_v416.z.literal("response.output_text.annotation.added"),
3375
- annotation: import_v416.z.object({
3376
- type: import_v416.z.literal("url_citation"),
3377
- url: import_v416.z.string(),
3378
- title: import_v416.z.string()
3379
- })
3406
+ annotation: import_v416.z.discriminatedUnion("type", [
3407
+ import_v416.z.object({
3408
+ type: import_v416.z.literal("url_citation"),
3409
+ url: import_v416.z.string(),
3410
+ title: import_v416.z.string()
3411
+ }),
3412
+ import_v416.z.object({
3413
+ type: import_v416.z.literal("file_citation"),
3414
+ file_id: import_v416.z.string(),
3415
+ quote: import_v416.z.string()
3416
+ })
3417
+ ])
3380
3418
  });
3381
3419
  var responseReasoningSummaryPartAddedSchema = import_v416.z.object({
3382
3420
  type: import_v416.z.literal("response.reasoning_summary_part.added"),