@ai-sdk/openai 2.0.0-alpha.11 → 2.0.0-alpha.13

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -652,6 +652,9 @@ var OpenAIChatLanguageModel = class {
   },
   transform(chunk, controller) {
     var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p, _q, _r, _s, _t, _u, _v, _w, _x;
+    if (options.includeRawChunks) {
+      controller.enqueue({ type: "raw", rawValue: chunk.rawValue });
+    }
     if (!chunk.success) {
       finishReason = "error";
       controller.enqueue({ type: "error", error: chunk.error });
@@ -1240,6 +1243,9 @@ var OpenAICompletionLanguageModel = class {
     controller.enqueue({ type: "stream-start", warnings });
   },
   transform(chunk, controller) {
+    if (options.includeRawChunks) {
+      controller.enqueue({ type: "raw", rawValue: chunk.rawValue });
+    }
     if (!chunk.success) {
       finishReason = "error";
       controller.enqueue({ type: "error", error: chunk.error });
@@ -2364,6 +2370,7 @@ var OpenAIResponsesLanguageModel = class {
   let responseId = null;
   const ongoingToolCalls = {};
   let hasToolCalls = false;
+  let lastReasoningSummaryIndex = null;
   return {
     stream: response.pipeThrough(
       new TransformStream({
@@ -2372,6 +2379,9 @@ var OpenAIResponsesLanguageModel = class {
   },
   transform(chunk, controller) {
     var _a, _b, _c, _d, _e, _f, _g, _h;
+    if (options.includeRawChunks) {
+      controller.enqueue({ type: "raw", rawValue: chunk.rawValue });
+    }
     if (!chunk.success) {
       finishReason = "error";
       controller.enqueue({ type: "error", error: chunk.error });
@@ -2417,10 +2427,16 @@ var OpenAIResponsesLanguageModel = class {
         text: value.delta
       });
     } else if (isResponseReasoningSummaryTextDeltaChunk(value)) {
+      if (lastReasoningSummaryIndex !== null && value.summary_index !== lastReasoningSummaryIndex) {
+        controller.enqueue({ type: "reasoning-part-finish" });
+      }
+      lastReasoningSummaryIndex = value.summary_index;
       controller.enqueue({
         type: "reasoning",
         text: value.delta
       });
+    } else if (isResponseReasoningSummaryPartDoneChunk(value)) {
+      controller.enqueue({ type: "reasoning-part-finish" });
     } else if (isResponseOutputItemDoneChunk(value) && value.item.type === "function_call") {
       ongoingToolCalls[value.output_index] = void 0;
       hasToolCalls = true;
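This tracking closes a reasoning part either when the provider reports a new summary_index mid-stream or when an explicit response.reasoning_summary_part.done event arrives (see the schema and type guard added further down). A compact sketch of that state machine, with hypothetical SummaryEvent and ReasoningPart stand-ins for the SDK's own chunk and stream-part types:

// Sketch of the summary-part boundary tracking; event and part shapes are
// simplified for illustration.
type SummaryEvent =
  | { type: "delta"; summary_index: number; delta: string }
  | { type: "part-done"; summary_index: number };

type ReasoningPart =
  | { type: "reasoning"; text: string }
  | { type: "reasoning-part-finish" };

function createReasoningTracker(emit: (part: ReasoningPart) => void) {
  let lastReasoningSummaryIndex: number | null = null;
  return (event: SummaryEvent) => {
    if (event.type === "delta") {
      // A changed summary_index means the previous summary part is complete.
      if (lastReasoningSummaryIndex !== null && event.summary_index !== lastReasoningSummaryIndex) {
        emit({ type: "reasoning-part-finish" });
      }
      lastReasoningSummaryIndex = event.summary_index;
      emit({ type: "reasoning", text: event.delta });
    } else {
      // An explicit part-done event from the provider also closes the part.
      emit({ type: "reasoning-part-finish" });
    }
  };
}
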
@@ -2549,6 +2565,13 @@ var responseReasoningSummaryTextDeltaSchema = z12.object({
   summary_index: z12.number(),
   delta: z12.string()
 });
+var responseReasoningSummaryPartDoneSchema = z12.object({
+  type: z12.literal("response.reasoning_summary_part.done"),
+  item_id: z12.string(),
+  output_index: z12.number(),
+  summary_index: z12.number(),
+  part: z12.unknown().nullish()
+});
 var openaiResponsesChunkSchema = z12.union([
   textDeltaChunkSchema,
   responseFinishedChunkSchema,
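The new schema joins the provider's union-with-fallback pattern: known events are validated strictly, while a passthrough object lets unrecognized event types parse instead of erroring the stream. A standalone sketch using zod directly (the bundle aliases it as z12):

import { z } from "zod";

// Same shape as responseReasoningSummaryPartDoneSchema in the diff.
const reasoningSummaryPartDoneSchema = z.object({
  type: z.literal("response.reasoning_summary_part.done"),
  item_id: z.string(),
  output_index: z.number(),
  summary_index: z.number(),
  part: z.unknown().nullish()
});

const chunkSchema = z.union([
  reasoningSummaryPartDoneSchema,
  // Fallback: unknown event types still parse rather than failing the stream.
  z.object({ type: z.string() }).passthrough()
]);

// A future, unrecognized event is accepted via the fallback branch.
const parsed = chunkSchema.parse({ type: "response.some_future_event", data: 1 });
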
@@ -2558,6 +2581,7 @@ var openaiResponsesChunkSchema = z12.union([
   responseOutputItemAddedSchema,
   responseAnnotationAddedSchema,
   responseReasoningSummaryTextDeltaSchema,
+  responseReasoningSummaryPartDoneSchema,
   z12.object({ type: z12.string() }).passthrough()
   // fallback for unknown chunks
 ]);
@@ -2585,6 +2609,9 @@ function isResponseAnnotationAddedChunk(chunk) {
 function isResponseReasoningSummaryTextDeltaChunk(chunk) {
   return chunk.type === "response.reasoning_summary_text.delta";
 }
+function isResponseReasoningSummaryPartDoneChunk(chunk) {
+  return chunk.type === "response.reasoning_summary_part.done";
+}
 function getResponsesModelConfig(modelId) {
   if (modelId.startsWith("o")) {
     if (modelId.startsWith("o1-mini") || modelId.startsWith("o1-preview")) {