@zenning/openai 3.0.16 → 3.0.18

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/CHANGELOG.md CHANGED
@@ -1,5 +1,17 @@
  # @ai-sdk/openai
 
+ ## 3.0.18
+
+ ### Patch Changes
+
+ - reep
+
+ ## 3.0.17
+
+ ### Patch Changes
+
+ - rp
+
  ## 3.0.16
 
  ### Patch Changes
package/dist/index.js CHANGED
@@ -2479,14 +2479,12 @@ async function convertToOpenAIResponsesInput({
  hasShellTool = false,
  hasApplyPatchTool = false,
  compactionInput,
- providerOptions
+ previousResponseId
  }) {
  var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m;
  const input = [];
  const warnings = [];
  const processedApprovalIds = /* @__PURE__ */ new Set();
- const skipItemReferences = (providerOptions == null ? void 0 : providerOptions.skipItemReferencesForApproval) === true;
- console.log("[OpenAI Provider] Skip item references flag:", skipItemReferences);
  if (compactionInput && compactionInput.length > 0) {
  input.push(...compactionInput);
  }
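This hunk replaces the ad-hoc `providerOptions` parameter (and the `skipItemReferencesForApproval` debug flag plus its console logging) with an explicit `previousResponseId` argument that the conversion logic checks directly. Below is a minimal usage sketch of how that value would reach the converter from application code, assuming the fork keeps the upstream @ai-sdk/openai surface (`createOpenAI`, `.responses()`, provider options under the `openai` key); the model id and response id are placeholders.

```ts
import { generateText } from "ai";
import { createOpenAI } from "@zenning/openai";

// Hedged sketch: assumes this fork mirrors the upstream @ai-sdk/openai API.
const openai = createOpenAI({ apiKey: process.env.OPENAI_API_KEY });

const { text } = await generateText({
  model: openai.responses("gpt-4o-mini"),
  prompt: "Continue from where we left off.",
  providerOptions: {
    openai: {
      // With previousResponseId set, convertToOpenAIResponsesInput no longer
      // emits item_reference entries for stored items; the referenced response
      // already carries them on the server side.
      previousResponseId: "resp_123",
    },
  },
});
```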
@@ -2568,7 +2566,7 @@ async function convertToOpenAIResponsesInput({
  switch (part.type) {
  case "text": {
  const id = (_b = (_a = part.providerOptions) == null ? void 0 : _a[providerOptionsName]) == null ? void 0 : _b.itemId;
- if (store && id != null) {
+ if (store && id != null && !previousResponseId) {
  input.push({ type: "item_reference", id });
  break;
  }
@@ -2581,31 +2579,16 @@ async function convertToOpenAIResponsesInput({
  }
  case "tool-call": {
  const id = (_g = (_d = (_c = part.providerOptions) == null ? void 0 : _c[providerOptionsName]) == null ? void 0 : _d.itemId) != null ? _g : (_f = (_e = part.providerMetadata) == null ? void 0 : _e[providerOptionsName]) == null ? void 0 : _f.itemId;
- console.log("[OpenAI Provider] Processing tool-call:", {
- toolCallId: part.toolCallId,
- toolName: part.toolName,
- id,
- providerExecuted: part.providerExecuted,
- store,
- skipItemReferences,
- willCreateItemReference: store && id != null && !skipItemReferences
- });
  if (part.providerExecuted) {
- if (store && id != null && !skipItemReferences) {
- console.log("[OpenAI Provider] Creating item_reference (providerExecuted)");
+ if (store && id != null && !previousResponseId) {
  input.push({ type: "item_reference", id });
  }
  break;
  }
- if (store && id != null && !skipItemReferences) {
- console.log("[OpenAI Provider] Creating item_reference (non-providerExecuted)");
+ if (store && id != null && !previousResponseId) {
  input.push({ type: "item_reference", id });
  break;
  }
- if (skipItemReferences) {
- console.log("[OpenAI Provider] Skipping function_call due to approval continuation flag");
- break;
- }
  const resolvedToolName = toolNameMapping.toProviderToolName(
  part.toolName
  );
@@ -2661,9 +2644,10 @@ async function convertToOpenAIResponsesInput({
  if (part.output.type === "execution-denied" || part.output.type === "json" && typeof part.output.value === "object" && part.output.value != null && "type" in part.output.value && part.output.value.type === "execution-denied") {
  break;
  }
- if (store) {
+ if (store && !previousResponseId) {
  const itemId = (_j = (_i = (_h = part.providerMetadata) == null ? void 0 : _h[providerOptionsName]) == null ? void 0 : _i.itemId) != null ? _j : part.toolCallId;
  input.push({ type: "item_reference", id: itemId });
+ } else if (store) {
  } else {
  warnings.push({
  type: "other",
@@ -2673,15 +2657,15 @@ async function convertToOpenAIResponsesInput({
  break;
  }
  case "reasoning": {
- const providerOptions2 = await (0, import_provider_utils23.parseProviderOptions)({
+ const providerOptions = await (0, import_provider_utils23.parseProviderOptions)({
  provider: providerOptionsName,
  providerOptions: part.providerOptions,
  schema: openaiResponsesReasoningProviderOptionsSchema
  });
- const reasoningId = providerOptions2 == null ? void 0 : providerOptions2.itemId;
+ const reasoningId = providerOptions == null ? void 0 : providerOptions.itemId;
  if (reasoningId != null) {
  const reasoningMessage = reasoningMessages[reasoningId];
- if (store) {
+ if (store && !previousResponseId) {
  if (reasoningMessage === void 0) {
  input.push({ type: "item_reference", id: reasoningId });
  reasoningMessages[reasoningId] = {
@@ -2690,6 +2674,14 @@ async function convertToOpenAIResponsesInput({
  summary: []
  };
  }
+ } else if (store) {
+ if (reasoningMessage === void 0) {
+ reasoningMessages[reasoningId] = {
+ type: "reasoning",
+ id: reasoningId,
+ summary: []
+ };
+ }
  } else {
  const summaryParts = [];
  if (part.text.length > 0) {
@@ -2707,14 +2699,14 @@ async function convertToOpenAIResponsesInput({
  reasoningMessages[reasoningId] = {
  type: "reasoning",
  id: reasoningId,
- encrypted_content: providerOptions2 == null ? void 0 : providerOptions2.reasoningEncryptedContent,
+ encrypted_content: providerOptions == null ? void 0 : providerOptions.reasoningEncryptedContent,
  summary: summaryParts
  };
  input.push(reasoningMessages[reasoningId]);
  } else {
  reasoningMessage.summary.push(...summaryParts);
- if ((providerOptions2 == null ? void 0 : providerOptions2.reasoningEncryptedContent) != null) {
- reasoningMessage.encrypted_content = providerOptions2.reasoningEncryptedContent;
+ if ((providerOptions == null ? void 0 : providerOptions.reasoningEncryptedContent) != null) {
+ reasoningMessage.encrypted_content = providerOptions.reasoningEncryptedContent;
  }
  }
  }
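The hunks above all apply the same gate: an `item_reference` is only pushed when `store` is true, an item id is known, and no `previousResponseId` was supplied. When a previous response id is present, the stored item is omitted from the new input entirely (the empty `else if (store)` branches), and reasoning items are still seeded into `reasoningMessages` so later summary parts with the same id can merge locally without being re-sent. A simplified restatement of that decision as a standalone helper; the helper name and return labels are illustrative, not part of the package.

```ts
type StoredItemHandling = "reference" | "omit" | "resend";

// Illustrative only: restates the gating added in this diff; it is not an
// export of @zenning/openai.
function decideStoredItemHandling(args: {
  store: boolean;
  itemId: string | null | undefined;
  previousResponseId?: string;
}): StoredItemHandling {
  const { store, itemId, previousResponseId } = args;
  if (store && itemId != null) {
    // Server has the item. Without a previous response id, point at it via a
    // lightweight item_reference; with one, drop it from the new input, since
    // previous_response_id already brings it back into context.
    return previousResponseId ? "omit" : "reference";
  }
  // Not addressable as a stored item: the full payload must be resent
  // (or a warning is emitted, depending on the part type).
  return "resend";
}

// Example: continuing a stored conversation via previousResponseId.
decideStoredItemHandling({
  store: true,
  itemId: "rs_abc",
  previousResponseId: "resp_123",
}); // => "omit"
```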
@@ -2738,7 +2730,7 @@ async function convertToOpenAIResponsesInput({
  continue;
  }
  processedApprovalIds.add(approvalResponse.approvalId);
- if (store) {
+ if (store && !previousResponseId) {
  input.push({
  type: "item_reference",
  id: approvalResponse.approvalId
@@ -4040,7 +4032,7 @@ var OpenAIResponsesLanguageModel = class {
  toolChoice,
  responseFormat
  }) {
- var _a, _b, _c, _d, _e, _f;
+ var _a, _b, _c, _d, _e, _f, _g;
  const warnings = [];
  const modelCapabilities = getOpenAILanguageModelCapabilities(this.modelId);
  if (topK != null) {
@@ -4104,10 +4096,10 @@ var OpenAIResponsesLanguageModel = class {
  hasShellTool: hasOpenAITool("openai.shell"),
  hasApplyPatchTool: hasOpenAITool("openai.apply_patch"),
  compactionInput: openaiOptions == null ? void 0 : openaiOptions.compactionInput,
- providerOptions: openaiOptions
+ previousResponseId: (_d = openaiOptions == null ? void 0 : openaiOptions.previousResponseId) != null ? _d : void 0
  });
  warnings.push(...inputWarnings);
- const strictJsonSchema = (_d = openaiOptions == null ? void 0 : openaiOptions.strictJsonSchema) != null ? _d : true;
+ const strictJsonSchema = (_e = openaiOptions == null ? void 0 : openaiOptions.strictJsonSchema) != null ? _e : true;
  let include = openaiOptions == null ? void 0 : openaiOptions.include;
  function addInclude(key) {
  if (include == null) {
@@ -4123,9 +4115,9 @@ var OpenAIResponsesLanguageModel = class {
  if (topLogprobs) {
  addInclude("message.output_text.logprobs");
  }
- const webSearchToolName = (_e = tools == null ? void 0 : tools.find(
+ const webSearchToolName = (_f = tools == null ? void 0 : tools.find(
  (tool) => tool.type === "provider" && (tool.id === "openai.web_search" || tool.id === "openai.web_search_preview")
- )) == null ? void 0 : _e.name;
+ )) == null ? void 0 : _f.name;
  if (webSearchToolName) {
  addInclude("web_search_call.action.sources");
  }
@@ -4148,7 +4140,7 @@ var OpenAIResponsesLanguageModel = class {
  format: responseFormat.schema != null ? {
  type: "json_schema",
  strict: strictJsonSchema,
- name: (_f = responseFormat.name) != null ? _f : "response",
+ name: (_g = responseFormat.name) != null ? _g : "response",
  description: responseFormat.description,
  schema: responseFormat.schema
  } : { type: "json_object" }
@@ -5790,7 +5782,7 @@ var OpenAITranscriptionModel = class {
  };
 
  // src/version.ts
- var VERSION = true ? "3.0.15" : "0.0.0-test";
+ var VERSION = true ? "3.0.17" : "0.0.0-test";
 
  // src/openai-provider.ts
  function createOpenAI(options = {}) {