@ai-sdk/openai 2.0.0-alpha.11 → 2.0.0-alpha.13

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.mjs CHANGED
@@ -658,6 +658,9 @@ var OpenAIChatLanguageModel = class {
 },
 transform(chunk, controller) {
   var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p, _q, _r, _s, _t, _u, _v, _w, _x;
+  if (options.includeRawChunks) {
+    controller.enqueue({ type: "raw", rawValue: chunk.rawValue });
+  }
   if (!chunk.success) {
     finishReason = "error";
     controller.enqueue({ type: "error", error: chunk.error });
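From the consumer side, the new `includeRawChunks` handling lets callers see the untouched provider chunks alongside the parsed stream parts. A minimal sketch, assuming the `ai` package's `streamText` exposes the option under the same `includeRawChunks` name and forwards `raw` parts on `fullStream` (model id and prompt are placeholders):

```js
import { streamText } from 'ai';
import { openai } from '@ai-sdk/openai';

const result = streamText({
  model: openai('gpt-4o'),
  prompt: 'Hello!',
  // Assumption: streamText passes this through to the provider's doStream options.
  includeRawChunks: true,
});

for await (const part of result.fullStream) {
  if (part.type === 'raw') {
    // `rawValue` is the unparsed provider chunk enqueued by the transform above.
    console.log(part.rawValue);
  }
}
```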
@@ -1246,6 +1249,9 @@ var OpenAICompletionLanguageModel = class {
   controller.enqueue({ type: "stream-start", warnings });
 },
 transform(chunk, controller) {
+  if (options.includeRawChunks) {
+    controller.enqueue({ type: "raw", rawValue: chunk.rawValue });
+  }
   if (!chunk.success) {
     finishReason = "error";
     controller.enqueue({ type: "error", error: chunk.error });
@@ -2287,6 +2293,7 @@ var OpenAIResponsesLanguageModel = class {
 let responseId = null;
 const ongoingToolCalls = {};
 let hasToolCalls = false;
+let lastReasoningSummaryIndex = null;
 return {
   stream: response.pipeThrough(
     new TransformStream({
@@ -2295,6 +2302,9 @@ var OpenAIResponsesLanguageModel = class {
 },
 transform(chunk, controller) {
   var _a, _b, _c, _d, _e, _f, _g, _h;
+  if (options.includeRawChunks) {
+    controller.enqueue({ type: "raw", rawValue: chunk.rawValue });
+  }
   if (!chunk.success) {
     finishReason = "error";
     controller.enqueue({ type: "error", error: chunk.error });
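The same three added lines appear in the chat, completion, and responses transforms. An isolated, runnable sketch of the pattern (the sample chunks are made up): because the raw part is enqueued before the `chunk.success` check, consumers receive raw data even for chunks that fail schema parsing.

```js
const includeRawChunks = true;

// Stand-in for the parsed SSE stream the real transforms receive.
const source = new ReadableStream({
  start(controller) {
    controller.enqueue({ success: true, value: { text: 'hi' }, rawValue: '{"text":"hi"}' });
    controller.enqueue({ success: false, error: new Error('parse error'), rawValue: '{oops' });
    controller.close();
  },
});

const parts = source.pipeThrough(
  new TransformStream({
    transform(chunk, controller) {
      if (includeRawChunks) {
        controller.enqueue({ type: 'raw', rawValue: chunk.rawValue });
      }
      if (!chunk.success) {
        controller.enqueue({ type: 'error', error: chunk.error });
        return;
      }
      // ...normal handling of parsed chunks continues here...
    },
  }),
);

for await (const part of parts) {
  console.log(part.type);
}
// raw, raw, error
```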
@@ -2340,10 +2350,16 @@ var OpenAIResponsesLanguageModel = class {
     text: value.delta
   });
 } else if (isResponseReasoningSummaryTextDeltaChunk(value)) {
+  if (lastReasoningSummaryIndex !== null && value.summary_index !== lastReasoningSummaryIndex) {
+    controller.enqueue({ type: "reasoning-part-finish" });
+  }
+  lastReasoningSummaryIndex = value.summary_index;
   controller.enqueue({
     type: "reasoning",
     text: value.delta
   });
+} else if (isResponseReasoningSummaryPartDoneChunk(value)) {
+  controller.enqueue({ type: "reasoning-part-finish" });
 } else if (isResponseOutputItemDoneChunk(value) && value.item.type === "function_call") {
   ongoingToolCalls[value.output_index] = void 0;
   hasToolCalls = true;
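The `lastReasoningSummaryIndex` tracking closes a reasoning part whenever the `summary_index` of consecutive summary text deltas changes, and `response.reasoning_summary_part.done` closes the final one. A small replay of that logic over a made-up event sequence:

```js
const events = [
  { type: 'response.reasoning_summary_text.delta', summary_index: 0, delta: 'First ' },
  { type: 'response.reasoning_summary_text.delta', summary_index: 0, delta: 'summary.' },
  { type: 'response.reasoning_summary_text.delta', summary_index: 1, delta: 'Second summary.' },
  { type: 'response.reasoning_summary_part.done', summary_index: 1 },
];

let lastReasoningSummaryIndex = null;
const parts = [];

for (const value of events) {
  if (value.type === 'response.reasoning_summary_text.delta') {
    // A new summary_index means the previous reasoning part is complete.
    if (lastReasoningSummaryIndex !== null && value.summary_index !== lastReasoningSummaryIndex) {
      parts.push({ type: 'reasoning-part-finish' });
    }
    lastReasoningSummaryIndex = value.summary_index;
    parts.push({ type: 'reasoning', text: value.delta });
  } else if (value.type === 'response.reasoning_summary_part.done') {
    parts.push({ type: 'reasoning-part-finish' });
  }
}

console.log(parts.map((p) => p.type).join(' -> '));
// reasoning -> reasoning -> reasoning-part-finish -> reasoning -> reasoning-part-finish
```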
@@ -2472,6 +2488,13 @@ var responseReasoningSummaryTextDeltaSchema = z12.object({
   summary_index: z12.number(),
   delta: z12.string()
 });
+var responseReasoningSummaryPartDoneSchema = z12.object({
+  type: z12.literal("response.reasoning_summary_part.done"),
+  item_id: z12.string(),
+  output_index: z12.number(),
+  summary_index: z12.number(),
+  part: z12.unknown().nullish()
+});
 var openaiResponsesChunkSchema = z12.union([
   textDeltaChunkSchema,
   responseFinishedChunkSchema,
@@ -2481,6 +2504,7 @@ var openaiResponsesChunkSchema = z12.union([
   responseOutputItemAddedSchema,
   responseAnnotationAddedSchema,
   responseReasoningSummaryTextDeltaSchema,
+  responseReasoningSummaryPartDoneSchema,
   z12.object({ type: z12.string() }).passthrough()
   // fallback for unknown chunks
 ]);
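The new schema is registered in the chunk union ahead of the passthrough fallback, so `response.reasoning_summary_part.done` events parse into a typed shape instead of falling through. A quick standalone check of the schema (plain `zod` import; the sample payload is illustrative):

```js
import { z } from 'zod';

const responseReasoningSummaryPartDoneSchema = z.object({
  type: z.literal('response.reasoning_summary_part.done'),
  item_id: z.string(),
  output_index: z.number(),
  summary_index: z.number(),
  part: z.unknown().nullish(),
});

const result = responseReasoningSummaryPartDoneSchema.safeParse({
  type: 'response.reasoning_summary_part.done',
  item_id: 'rs_123',
  output_index: 0,
  summary_index: 1,
  part: { type: 'summary_text', text: 'Done reasoning.' },
});

console.log(result.success); // true
```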
@@ -2508,6 +2532,9 @@ function isResponseAnnotationAddedChunk(chunk) {
 function isResponseReasoningSummaryTextDeltaChunk(chunk) {
   return chunk.type === "response.reasoning_summary_text.delta";
 }
+function isResponseReasoningSummaryPartDoneChunk(chunk) {
+  return chunk.type === "response.reasoning_summary_part.done";
+}
 function getResponsesModelConfig(modelId) {
   if (modelId.startsWith("o")) {
     if (modelId.startsWith("o1-mini") || modelId.startsWith("o1-preview")) {
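Because the union keeps its `.passthrough()` fallback, unknown chunk types still parse, and the transform dispatches on plain `chunk.type` checks like the new guard. An illustrative dispatch over made-up chunks:

```js
function isResponseReasoningSummaryPartDoneChunk(chunk) {
  return chunk.type === 'response.reasoning_summary_part.done';
}

const sampleChunks = [
  { type: 'response.reasoning_summary_part.done', item_id: 'rs_1', output_index: 0, summary_index: 0 },
  { type: 'response.some_future_event' }, // accepted by the fallback schema, ignored by this guard
];

for (const chunk of sampleChunks) {
  if (isResponseReasoningSummaryPartDoneChunk(chunk)) {
    console.log('emit reasoning-part-finish for summary_index', chunk.summary_index);
  }
}
```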