@ai-sdk/openai 2.0.0-beta.5 → 2.0.0-beta.6

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/CHANGELOG.md CHANGED
@@ -1,5 +1,13 @@
  # @ai-sdk/openai
 
+ ## 2.0.0-beta.6
+
+ ### Patch Changes
+
+ - 0eee6a8: Fix streaming and reconstruction of reasoning summary parts
+ - b5a0e32: fix (provider/openai): correct default for chat model strict mode
+ - c7d3b2e: fix (provider/openai): push first reasoning chunk in output item added event
+
  ## 2.0.0-beta.5
 
  ### Patch Changes
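
The b5a0e32 entry corrects the documented default of the `strictJsonSchema` provider option to `false` (see the `@default` change in `package/dist/index.js` below). As a hedged illustration only, callers who want strict JSON schema validation can enable it explicitly; this sketch assumes the AI SDK v5 `providerOptions` shape, and the model name and prompt are placeholders:

```ts
import { generateObject } from 'ai';
import { openai } from '@ai-sdk/openai';
import { z } from 'zod';

// Sketch only: strictJsonSchema is documented as defaulting to false in
// 2.0.0-beta.6, so opt in explicitly if strict validation is wanted.
// Model and prompt are illustrative placeholders.
const { object } = await generateObject({
  model: openai('gpt-4.1-mini'),
  schema: z.object({ title: z.string() }),
  prompt: 'Suggest a title for a post about streaming reasoning summaries.',
  providerOptions: {
    openai: { strictJsonSchema: true },
  },
});

console.log(object.title);
```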
package/dist/index.js CHANGED
@@ -306,7 +306,7 @@ var openaiProviderOptions = import_v4.z.object({
  /**
  * Whether to use strict JSON schema validation.
  *
- * @default true
+ * @default false
  */
  strictJsonSchema: import_v4.z.boolean().optional()
  });
@@ -2547,6 +2547,7 @@ var OpenAIResponsesLanguageModel = class {
  let responseId = null;
  const ongoingToolCalls = {};
  let hasToolCalls = false;
+ const activeReasoning = {};
  return {
  stream: response.pipeThrough(
  new TransformStream({
@@ -2554,7 +2555,7 @@ var OpenAIResponsesLanguageModel = class {
  controller.enqueue({ type: "stream-start", warnings });
  },
  transform(chunk, controller) {
- var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j;
+ var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m;
  if (options.includeRawChunks) {
  controller.enqueue({ type: "raw", rawValue: chunk.rawValue });
  }
@@ -2600,10 +2601,14 @@ var OpenAIResponsesLanguageModel = class {
  type: "text-start",
  id: value.item.id
  });
- } else if (value.item.type === "reasoning") {
+ } else if (isResponseOutputItemAddedReasoningChunk(value)) {
+ activeReasoning[value.item.id] = {
+ encryptedContent: value.item.encrypted_content,
+ summaryParts: [0]
+ };
  controller.enqueue({
  type: "reasoning-start",
- id: value.item.id,
+ id: `${value.item.id}:0`,
  providerMetadata: {
  openai: {
  reasoning: {
@@ -2681,19 +2686,23 @@ var OpenAIResponsesLanguageModel = class {
  type: "text-end",
  id: value.item.id
  });
- } else if (value.item.type === "reasoning") {
- controller.enqueue({
- type: "reasoning-end",
- id: value.item.id,
- providerMetadata: {
- openai: {
- reasoning: {
- id: value.item.id,
- encryptedContent: (_b = value.item.encrypted_content) != null ? _b : null
+ } else if (isResponseOutputItemDoneReasoningChunk(value)) {
+ const activeReasoningPart = activeReasoning[value.item.id];
+ for (const summaryIndex of activeReasoningPart.summaryParts) {
+ controller.enqueue({
+ type: "reasoning-end",
+ id: `${value.item.id}:${summaryIndex}`,
+ providerMetadata: {
+ openai: {
+ reasoning: {
+ id: value.item.id,
+ encryptedContent: (_b = value.item.encrypted_content) != null ? _b : null
+ }
  }
  }
- }
- });
+ });
+ }
+ delete activeReasoning[value.item.id];
  }
  } else if (isResponseFunctionCallArgumentsDeltaChunk(value)) {
  const toolCall = ongoingToolCalls[value.output_index];
@@ -2718,27 +2727,52 @@ var OpenAIResponsesLanguageModel = class {
  id: value.item_id,
  delta: value.delta
  });
+ } else if (isResponseReasoningSummaryPartAddedChunk(value)) {
+ if (value.summary_index > 0) {
+ (_c = activeReasoning[value.item_id]) == null ? void 0 : _c.summaryParts.push(
+ value.summary_index
+ );
+ controller.enqueue({
+ type: "reasoning-start",
+ id: `${value.item_id}:${value.summary_index}`,
+ providerMetadata: {
+ openai: {
+ reasoning: {
+ id: value.item_id,
+ encryptedContent: (_e = (_d = activeReasoning[value.item_id]) == null ? void 0 : _d.encryptedContent) != null ? _e : null
+ }
+ }
+ }
+ });
+ }
  } else if (isResponseReasoningSummaryTextDeltaChunk(value)) {
  controller.enqueue({
  type: "reasoning-delta",
- id: value.item_id,
- delta: value.delta
+ id: `${value.item_id}:${value.summary_index}`,
+ delta: value.delta,
+ providerMetadata: {
+ openai: {
+ reasoning: {
+ id: value.item_id
+ }
+ }
+ }
  });
  } else if (isResponseFinishedChunk(value)) {
  finishReason = mapOpenAIResponseFinishReason({
- finishReason: (_c = value.response.incomplete_details) == null ? void 0 : _c.reason,
+ finishReason: (_f = value.response.incomplete_details) == null ? void 0 : _f.reason,
  hasToolCalls
  });
  usage.inputTokens = value.response.usage.input_tokens;
  usage.outputTokens = value.response.usage.output_tokens;
  usage.totalTokens = value.response.usage.input_tokens + value.response.usage.output_tokens;
- usage.reasoningTokens = (_e = (_d = value.response.usage.output_tokens_details) == null ? void 0 : _d.reasoning_tokens) != null ? _e : void 0;
- usage.cachedInputTokens = (_g = (_f = value.response.usage.input_tokens_details) == null ? void 0 : _f.cached_tokens) != null ? _g : void 0;
+ usage.reasoningTokens = (_h = (_g = value.response.usage.output_tokens_details) == null ? void 0 : _g.reasoning_tokens) != null ? _h : void 0;
+ usage.cachedInputTokens = (_j = (_i = value.response.usage.input_tokens_details) == null ? void 0 : _i.cached_tokens) != null ? _j : void 0;
  } else if (isResponseAnnotationAddedChunk(value)) {
  controller.enqueue({
  type: "source",
  sourceType: "url",
- id: (_j = (_i = (_h = self.config).generateId) == null ? void 0 : _i.call(_h)) != null ? _j : (0, import_provider_utils11.generateId)(),
+ id: (_m = (_l = (_k = self.config).generateId) == null ? void 0 : _l.call(_k)) != null ? _m : (0, import_provider_utils11.generateId)(),
  url: value.annotation.url,
  title: value.annotation.title
  });
@@ -2809,13 +2843,7 @@ var responseOutputItemAddedSchema = import_v414.z.object({
  import_v414.z.object({
  type: import_v414.z.literal("reasoning"),
  id: import_v414.z.string(),
- encrypted_content: import_v414.z.string().nullish(),
- summary: import_v414.z.array(
- import_v414.z.object({
- type: import_v414.z.literal("summary_text"),
- text: import_v414.z.string()
- })
- )
+ encrypted_content: import_v414.z.string().nullish()
  }),
  import_v414.z.object({
  type: import_v414.z.literal("function_call"),
@@ -2847,13 +2875,7 @@ var responseOutputItemDoneSchema = import_v414.z.object({
  import_v414.z.object({
  type: import_v414.z.literal("reasoning"),
  id: import_v414.z.string(),
- encrypted_content: import_v414.z.string().nullish(),
- summary: import_v414.z.array(
- import_v414.z.object({
- type: import_v414.z.literal("summary_text"),
- text: import_v414.z.string()
- })
- )
+ encrypted_content: import_v414.z.string().nullish()
  }),
  import_v414.z.object({
  type: import_v414.z.literal("function_call"),
@@ -2889,9 +2911,15 @@ var responseAnnotationAddedSchema = import_v414.z.object({
  title: import_v414.z.string()
  })
  });
+ var responseReasoningSummaryPartAddedSchema = import_v414.z.object({
+ type: import_v414.z.literal("response.reasoning_summary_part.added"),
+ item_id: import_v414.z.string(),
+ summary_index: import_v414.z.number()
+ });
  var responseReasoningSummaryTextDeltaSchema = import_v414.z.object({
  type: import_v414.z.literal("response.reasoning_summary_text.delta"),
  item_id: import_v414.z.string(),
+ summary_index: import_v414.z.number(),
  delta: import_v414.z.string()
  });
  var openaiResponsesChunkSchema = import_v414.z.union([
@@ -2902,6 +2930,7 @@ var openaiResponsesChunkSchema = import_v414.z.union([
  responseOutputItemDoneSchema,
  responseFunctionCallArgumentsDeltaSchema,
  responseAnnotationAddedSchema,
+ responseReasoningSummaryPartAddedSchema,
  responseReasoningSummaryTextDeltaSchema,
  errorChunkSchema,
  import_v414.z.object({ type: import_v414.z.string() }).loose()
@@ -2913,6 +2942,9 @@ function isTextDeltaChunk(chunk) {
  function isResponseOutputItemDoneChunk(chunk) {
  return chunk.type === "response.output_item.done";
  }
+ function isResponseOutputItemDoneReasoningChunk(chunk) {
+ return isResponseOutputItemDoneChunk(chunk) && chunk.item.type === "reasoning";
+ }
  function isResponseFinishedChunk(chunk) {
  return chunk.type === "response.completed" || chunk.type === "response.incomplete";
  }
@@ -2925,9 +2957,15 @@ function isResponseFunctionCallArgumentsDeltaChunk(chunk) {
  function isResponseOutputItemAddedChunk(chunk) {
  return chunk.type === "response.output_item.added";
  }
+ function isResponseOutputItemAddedReasoningChunk(chunk) {
+ return isResponseOutputItemAddedChunk(chunk) && chunk.item.type === "reasoning";
+ }
  function isResponseAnnotationAddedChunk(chunk) {
  return chunk.type === "response.output_text.annotation.added";
  }
+ function isResponseReasoningSummaryPartAddedChunk(chunk) {
+ return chunk.type === "response.reasoning_summary_part.added";
+ }
  function isResponseReasoningSummaryTextDeltaChunk(chunk) {
  return chunk.type === "response.reasoning_summary_text.delta";
  }
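
The reasoning-summary changes above key each stream part by `${item_id}:${summary_index}` and open a separate reasoning part per summary index. The sketch below only illustrates how those ids could be grouped back into per-summary texts; the part shape is a pared-down stand-in for the chunks enqueued in the diff, not the full stream part type.

```ts
// Sketch only: groups reasoning deltas by the composite id introduced in
// 2.0.0-beta.6 (e.g. "rs_abc:0", "rs_abc:1"). The ReasoningStreamPart type
// below is an assumption that mirrors the enqueued chunks shown in the diff.
type ReasoningStreamPart =
  | { type: 'reasoning-start'; id: string }
  | { type: 'reasoning-delta'; id: string; delta: string }
  | { type: 'reasoning-end'; id: string };

function collectReasoningSummaries(parts: ReasoningStreamPart[]): Map<string, string> {
  const summaries = new Map<string, string>();
  for (const part of parts) {
    if (part.type === 'reasoning-start') {
      // One entry per summary part of a reasoning item.
      summaries.set(part.id, '');
    } else if (part.type === 'reasoning-delta') {
      summaries.set(part.id, (summaries.get(part.id) ?? '') + part.delta);
    }
  }
  return summaries;
}

// For an item "rs_abc" with two summary parts, the map ends up with the
// keys "rs_abc:0" and "rs_abc:1", each holding that summary's full text.
```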