@ai-sdk/openai 2.0.21 → 2.0.22

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -2683,7 +2683,7 @@ var OpenAIResponsesLanguageModel = class {
  };
  }
  async doGenerate(options) {
- var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n;
+ var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p, _q;
  const { args: body, warnings } = await this.getArgs(options);
  const url = this.config.url({
  path: "/responses",
@@ -2729,10 +2729,12 @@ var OpenAIResponsesLanguageModel = class {
  }),
  import_v416.z.object({
  type: import_v416.z.literal("file_citation"),
- start_index: import_v416.z.number(),
- end_index: import_v416.z.number(),
  file_id: import_v416.z.string(),
- quote: import_v416.z.string()
+ filename: import_v416.z.string().nullish(),
+ index: import_v416.z.number().nullish(),
+ start_index: import_v416.z.number().nullish(),
+ end_index: import_v416.z.number().nullish(),
+ quote: import_v416.z.string().nullish()
  })
  ])
  )
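
For readability, this is a rough standalone sketch of the expanded file_citation variant, written against plain zod rather than the bundled import_v416.z alias; the schema name fileCitationAnnotation is invented here. Only type and file_id remain required, while filename, index, start_index, end_index, and quote are all nullish (null or undefined are accepted).

// Illustrative sketch only; field names mirror the diff above.
import { z } from "zod";

const fileCitationAnnotation = z.object({
  type: z.literal("file_citation"),
  file_id: z.string(),
  filename: z.string().nullish(),
  index: z.number().nullish(),
  start_index: z.number().nullish(),
  end_index: z.number().nullish(),
  quote: z.string().nullish(),
});

// Example: an annotation without quote or character offsets now parses.
fileCitationAnnotation.parse({
  type: "file_citation",
  file_id: "file-abc123",
  filename: "report.pdf",
  index: 0,
});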
@@ -2850,8 +2852,8 @@ var OpenAIResponsesLanguageModel = class {
  sourceType: "document",
  id: (_i = (_h = (_g = this.config).generateId) == null ? void 0 : _h.call(_g)) != null ? _i : (0, import_provider_utils14.generateId)(),
  mediaType: "text/plain",
- title: annotation.quote,
- filename: annotation.file_id
+ title: (_k = (_j = annotation.quote) != null ? _j : annotation.filename) != null ? _k : "Document",
+ filename: (_l = annotation.filename) != null ? _l : annotation.file_id
  });
  }
  }
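
A rough readable equivalent of the new fallback chain in the compiled hunk above: the helper name toDocumentSource and the FileCitation type are invented for illustration, and generateId is the @ai-sdk/provider-utils helper referenced as import_provider_utils14.generateId in the bundle.

import { generateId } from "@ai-sdk/provider-utils";

// Invented types/names; the fallback logic matches the compiled temporaries _j.._l.
type FileCitation = {
  file_id: string;
  filename?: string | null;
  quote?: string | null;
};

function toDocumentSource(
  annotation: FileCitation,
  config: { generateId?: () => string }
) {
  return {
    sourceType: "document" as const,
    id: config.generateId?.() ?? generateId(),
    mediaType: "text/plain" as const,
    // Prefer the quote, then the filename, then a generic "Document" label.
    title: annotation.quote ?? annotation.filename ?? "Document",
    // Prefer the human-readable filename, falling back to the raw file id.
    filename: annotation.filename ?? annotation.file_id,
  };
}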
@@ -2942,15 +2944,15 @@ var OpenAIResponsesLanguageModel = class {
  return {
  content,
  finishReason: mapOpenAIResponseFinishReason({
- finishReason: (_j = response.incomplete_details) == null ? void 0 : _j.reason,
+ finishReason: (_m = response.incomplete_details) == null ? void 0 : _m.reason,
  hasToolCalls: content.some((part) => part.type === "tool-call")
  }),
  usage: {
  inputTokens: response.usage.input_tokens,
  outputTokens: response.usage.output_tokens,
  totalTokens: response.usage.input_tokens + response.usage.output_tokens,
- reasoningTokens: (_l = (_k = response.usage.output_tokens_details) == null ? void 0 : _k.reasoning_tokens) != null ? _l : void 0,
- cachedInputTokens: (_n = (_m = response.usage.input_tokens_details) == null ? void 0 : _m.cached_tokens) != null ? _n : void 0
+ reasoningTokens: (_o = (_n = response.usage.output_tokens_details) == null ? void 0 : _n.reasoning_tokens) != null ? _o : void 0,
+ cachedInputTokens: (_q = (_p = response.usage.input_tokens_details) == null ? void 0 : _p.cached_tokens) != null ? _q : void 0
  },
  request: { body },
  response: {
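
In this hunk only the compiled temporaries shift (_j.._n become _m.._q) because doGenerate now declares three more for the title/filename fallback; the usage mapping itself is unchanged. A readable sketch of that mapping, with the helper name mapUsage invented for illustration:

// Invented helper; mirrors the compiled mapping above over the Responses API usage shape.
function mapUsage(usage: {
  input_tokens: number;
  output_tokens: number;
  output_tokens_details?: { reasoning_tokens?: number } | null;
  input_tokens_details?: { cached_tokens?: number } | null;
}) {
  return {
    inputTokens: usage.input_tokens,
    outputTokens: usage.output_tokens,
    totalTokens: usage.input_tokens + usage.output_tokens,
    reasoningTokens: usage.output_tokens_details?.reasoning_tokens ?? undefined,
    cachedInputTokens: usage.input_tokens_details?.cached_tokens ?? undefined,
  };
}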
@@ -3002,7 +3004,7 @@ var OpenAIResponsesLanguageModel = class {
  controller.enqueue({ type: "stream-start", warnings });
  },
  transform(chunk, controller) {
- var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p, _q, _r;
+ var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p, _q, _r, _s, _t, _u;
  if (options.includeRawChunks) {
  controller.enqueue({ type: "raw", rawValue: chunk.rawValue });
  }
@@ -3268,8 +3270,8 @@ var OpenAIResponsesLanguageModel = class {
  sourceType: "document",
  id: (_r = (_q = (_p = self.config).generateId) == null ? void 0 : _q.call(_p)) != null ? _r : (0, import_provider_utils14.generateId)(),
  mediaType: "text/plain",
- title: value.annotation.quote,
- filename: value.annotation.file_id
+ title: (_t = (_s = value.annotation.quote) != null ? _s : value.annotation.filename) != null ? _t : "Document",
+ filename: (_u = value.annotation.filename) != null ? _u : value.annotation.file_id
  });
  }
  } else if (isErrorChunk(value)) {
@@ -3447,7 +3449,11 @@ var responseAnnotationAddedSchema = import_v416.z.object({
  import_v416.z.object({
  type: import_v416.z.literal("file_citation"),
  file_id: import_v416.z.string(),
- quote: import_v416.z.string()
+ filename: import_v416.z.string().nullish(),
+ index: import_v416.z.number().nullish(),
+ start_index: import_v416.z.number().nullish(),
+ end_index: import_v416.z.number().nullish(),
+ quote: import_v416.z.string().nullish()
  })
  ])
  });