@ai-sdk/openai 2.0.0-beta.4 → 2.0.0-beta.5

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1983,6 +1983,9 @@ var OpenAISpeechModel = class {
 };
 
 // src/responses/openai-responses-language-model.ts
+import {
+  APICallError
+} from "@ai-sdk/provider";
 import {
   combineHeaders as combineHeaders7,
   createEventSourceResponseHandler as createEventSourceResponseHandler3,
@@ -2116,7 +2119,7 @@ async function convertToOpenAIResponsesMessages({
         const summaryParts = [];
         if (part.text.length > 0) {
           summaryParts.push({ type: "summary_text", text: part.text });
-        } else {
+        } else if (existingReasoningMessage !== void 0) {
           warnings.push({
             type: "other",
             message: `Cannot append empty reasoning part to existing reasoning sequence. Skipping reasoning part: ${JSON.stringify(part)}.`
@@ -2436,15 +2439,16 @@ var OpenAIResponsesLanguageModel = class {
   async doGenerate(options) {
     var _a, _b, _c, _d, _e, _f, _g, _h, _i;
     const { args: body, warnings } = await this.getArgs(options);
+    const url = this.config.url({
+      path: "/responses",
+      modelId: this.modelId
+    });
     const {
       responseHeaders,
       value: response,
       rawValue: rawResponse
     } = await postJsonToApi6({
-      url: this.config.url({
-        path: "/responses",
-        modelId: this.modelId
-      }),
+      url,
       headers: combineHeaders7(this.config.headers(), options.headers),
       body,
       failedResponseHandler: openaiFailedResponseHandler,
@@ -2452,6 +2456,10 @@ var OpenAIResponsesLanguageModel = class {
       z15.object({
         id: z15.string(),
         created_at: z15.number(),
+        error: z15.object({
+          code: z15.string(),
+          message: z15.string()
+        }).nullish(),
         model: z15.string(),
         output: z15.array(
           z15.discriminatedUnion("type", [
@@ -2510,6 +2518,17 @@ var OpenAIResponsesLanguageModel = class {
       abortSignal: options.abortSignal,
       fetch: this.config.fetch
     });
+    if (response.error) {
+      throw new APICallError({
+        message: response.error.message,
+        url,
+        requestBodyValues: body,
+        statusCode: 400,
+        responseHeaders,
+        responseBody: rawResponse,
+        isRetryable: false
+      });
+    }
     const content = [];
     for (const part of response.output) {
       switch (part.type) {
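
In practice this means a /responses call whose body carries an error object now surfaces as an APICallError instead of failing later during output parsing. A minimal consumer-side sketch, assuming the ai package's generateText helper and this provider's responses() model factory (the model id and prompt are placeholders):

  import { generateText } from "ai";
  import { createOpenAI } from "@ai-sdk/openai";
  import { APICallError } from "@ai-sdk/provider";

  const openai = createOpenAI({ apiKey: process.env.OPENAI_API_KEY });

  try {
    const { text } = await generateText({
      model: openai.responses("gpt-4o"), // placeholder model id
      prompt: "Hello!",
    });
    console.log(text);
  } catch (error) {
    if (APICallError.isInstance(error)) {
      // Errors embedded in the /responses body are rethrown with request
      // context attached (url, request body, response headers/body).
      console.error(error.message, error.responseBody);
    } else {
      throw error;
    }
  }
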
@@ -2852,6 +2871,8 @@ var OpenAIResponsesLanguageModel = class {
             url: value.annotation.url,
             title: value.annotation.title
           });
+        } else if (isErrorChunk(value)) {
+          controller.enqueue({ type: "error", error: value });
         }
       },
       flush(controller) {
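
On the streaming side, error events are now forwarded as error parts instead of falling through to the unknown-chunk fallback. A rough sketch of how they might be observed, assuming the ai package's streamText helper and its fullStream iterator (part shapes vary across SDK versions, so treat this as illustrative):

  import { streamText } from "ai";
  import { createOpenAI } from "@ai-sdk/openai";

  const openai = createOpenAI({ apiKey: process.env.OPENAI_API_KEY });

  const result = streamText({
    model: openai.responses("gpt-4o"), // placeholder model id
    prompt: "Hello!",
  });

  for await (const part of result.fullStream) {
    if (part.type === "error") {
      // Error chunks from the /responses stream now arrive here instead of
      // being silently matched by the loose fallback schema.
      console.error("stream error:", part.error);
    }
  }
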
@@ -2884,6 +2905,13 @@ var textDeltaChunkSchema = z15.object({
   item_id: z15.string(),
   delta: z15.string()
 });
+var errorChunkSchema = z15.object({
+  type: z15.literal("error"),
+  code: z15.string(),
+  message: z15.string(),
+  param: z15.string().nullish(),
+  sequence_number: z15.number()
+});
 var responseFinishedChunkSchema = z15.object({
   type: z15.enum(["response.completed", "response.incomplete"]),
   response: z15.object({
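
For reference, a stand-alone Zod mirror of the new chunk schema and the kind of event it is meant to accept (the payload values below are invented for illustration, not taken from OpenAI's documentation):

  import { z } from "zod";

  // Stand-alone mirror of errorChunkSchema, for illustration only.
  const errorChunkSchema = z.object({
    type: z.literal("error"),
    code: z.string(),
    message: z.string(),
    param: z.string().nullish(),
    sequence_number: z.number(),
  });

  // Hypothetical stream event; field values are made up.
  const result = errorChunkSchema.safeParse({
    type: "error",
    code: "server_error",
    message: "Something went wrong while generating the response.",
    param: null,
    sequence_number: 42,
  });

  console.log(result.success); // true
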
@@ -3004,7 +3032,8 @@ var openaiResponsesChunkSchema = z15.union([
   responseFunctionCallArgumentsDeltaSchema,
   responseAnnotationAddedSchema,
   responseReasoningSummaryTextDeltaSchema,
-  z15.object({ type: z15.string() }).passthrough()
+  errorChunkSchema,
+  z15.object({ type: z15.string() }).loose()
   // fallback for unknown chunks
 ]);
 function isTextDeltaChunk(chunk) {
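
The fallback entry in the union also moves from .passthrough() to .loose(), presumably tracking the Zod 4 object API; both keep unrecognized keys on the parsed value, so unknown chunk types still pass validation. A small illustration using Zod 4's z.looseObject, which I'm assuming behaves like the bundled .loose() call:

  import { z } from "zod";

  // Loose objects retain unknown keys instead of stripping them.
  const fallbackChunk = z.looseObject({ type: z.string() });

  const parsed = fallbackChunk.parse({
    type: "response.some_future_event", // hypothetical event type
    detail: "unknown fields are kept on the parsed value",
  });

  console.log(parsed.detail); // "unknown fields are kept on the parsed value"
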
@@ -3031,6 +3060,9 @@ function isResponseAnnotationAddedChunk(chunk) {
 function isResponseReasoningSummaryTextDeltaChunk(chunk) {
   return chunk.type === "response.reasoning_summary_text.delta";
 }
+function isErrorChunk(chunk) {
+  return chunk.type === "error";
+}
 function getResponsesModelConfig(modelId) {
   if (modelId.startsWith("o") || modelId.startsWith("codex-") || modelId.startsWith("computer-use")) {
     if (modelId.startsWith("o1-mini") || modelId.startsWith("o1-preview")) {