@zenning/openai 3.0.19 → 3.0.21

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -2470,7 +2470,8 @@ async function convertToOpenAIResponsesInput({
  hasShellTool = false,
  hasApplyPatchTool = false,
  compactionInput,
- previousResponseId
+ previousResponseId,
+ containsApprovalResponses
  }) {
  var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m;
  const input = [];
@@ -2557,13 +2558,10 @@ async function convertToOpenAIResponsesInput({
  switch (part.type) {
  case "text": {
  const id = (_b = (_a = part.providerOptions) == null ? void 0 : _a[providerOptionsName]) == null ? void 0 : _b.itemId;
- if (store && id != null && !previousResponseId) {
+ if (store && id != null) {
  input.push({ type: "item_reference", id });
  break;
  }
- if (store && id != null && previousResponseId) {
- break;
- }
  input.push({
  role: "assistant",
  content: [{ type: "output_text", text: part.text }],
@@ -2574,16 +2572,16 @@ async function convertToOpenAIResponsesInput({
  case "tool-call": {
  const id = (_g = (_d = (_c = part.providerOptions) == null ? void 0 : _c[providerOptionsName]) == null ? void 0 : _d.itemId) != null ? _g : (_f = (_e = part.providerMetadata) == null ? void 0 : _e[providerOptionsName]) == null ? void 0 : _f.itemId;
  if (part.providerExecuted) {
- if (store && id != null && !previousResponseId) {
+ if (store && id != null && !containsApprovalResponses) {
  input.push({ type: "item_reference", id });
  }
  break;
  }
- if (store && id != null && !previousResponseId) {
+ if (store && id != null && !containsApprovalResponses) {
  input.push({ type: "item_reference", id });
  break;
  }
- if (store && id != null && previousResponseId) {
+ if (store && id != null && containsApprovalResponses) {
  break;
  }
  const resolvedToolName = toolNameMapping.toProviderToolName(
@@ -2641,10 +2639,9 @@ async function convertToOpenAIResponsesInput({
  if (part.output.type === "execution-denied" || part.output.type === "json" && typeof part.output.value === "object" && part.output.value != null && "type" in part.output.value && part.output.value.type === "execution-denied") {
  break;
  }
- if (store && !previousResponseId) {
+ if (store) {
  const itemId = (_j = (_i = (_h = part.providerMetadata) == null ? void 0 : _h[providerOptionsName]) == null ? void 0 : _i.itemId) != null ? _j : part.toolCallId;
  input.push({ type: "item_reference", id: itemId });
- } else if (store) {
  } else {
  warnings.push({
  type: "other",
@@ -2662,7 +2659,7 @@ async function convertToOpenAIResponsesInput({
  const reasoningId = providerOptions == null ? void 0 : providerOptions.itemId;
  if (reasoningId != null) {
  const reasoningMessage = reasoningMessages[reasoningId];
- if (store && !previousResponseId) {
+ if (store) {
  if (reasoningMessage === void 0) {
  input.push({ type: "item_reference", id: reasoningId });
  reasoningMessages[reasoningId] = {
@@ -2671,14 +2668,6 @@ async function convertToOpenAIResponsesInput({
  summary: []
  };
  }
- } else if (store) {
- if (reasoningMessage === void 0) {
- reasoningMessages[reasoningId] = {
- type: "reasoning",
- id: reasoningId,
- summary: []
- };
- }
  } else {
  const summaryParts = [];
  if (part.text.length > 0) {
@@ -3798,7 +3787,12 @@ var openaiResponsesProviderOptionsSchema = (0, import_provider_utils24.lazySchem
  type: import_v417.z.literal("compaction"),
  encrypted_content: import_v417.z.string()
  })
- ).optional()
+ ).optional(),
+ /**
+ * Whether the request contains tool approval responses.
+ * Defaults to `false`.
+ */
+ containsApprovalResponses: import_v417.z.boolean().optional()
  })
  )
  );
@@ -4321,7 +4315,7 @@ var OpenAIResponsesLanguageModel = class {
  toolChoice,
  responseFormat
  }) {
- var _a, _b, _c, _d, _e, _f, _g;
+ var _a, _b, _c, _d, _e, _f, _g, _h;
  const warnings = [];
  const modelCapabilities = getOpenAILanguageModelCapabilities(this.modelId);
  if (topK != null) {
@@ -4385,10 +4379,11 @@ var OpenAIResponsesLanguageModel = class {
  hasShellTool: hasOpenAITool("openai.shell"),
  hasApplyPatchTool: hasOpenAITool("openai.apply_patch"),
  compactionInput: openaiOptions == null ? void 0 : openaiOptions.compactionInput,
- previousResponseId: (_d = openaiOptions == null ? void 0 : openaiOptions.previousResponseId) != null ? _d : void 0
+ previousResponseId: (_d = openaiOptions == null ? void 0 : openaiOptions.previousResponseId) != null ? _d : void 0,
+ containsApprovalResponses: (_e = openaiOptions == null ? void 0 : openaiOptions.containsApprovalResponses) != null ? _e : false
  });
  warnings.push(...inputWarnings);
- const strictJsonSchema = (_e = openaiOptions == null ? void 0 : openaiOptions.strictJsonSchema) != null ? _e : true;
+ const strictJsonSchema = (_f = openaiOptions == null ? void 0 : openaiOptions.strictJsonSchema) != null ? _f : true;
  let include = openaiOptions == null ? void 0 : openaiOptions.include;
  function addInclude(key) {
  if (include == null) {
@@ -4404,9 +4399,9 @@ var OpenAIResponsesLanguageModel = class {
  if (topLogprobs) {
  addInclude("message.output_text.logprobs");
  }
- const webSearchToolName = (_f = tools == null ? void 0 : tools.find(
+ const webSearchToolName = (_g = tools == null ? void 0 : tools.find(
  (tool) => tool.type === "provider" && (tool.id === "openai.web_search" || tool.id === "openai.web_search_preview")
- )) == null ? void 0 : _f.name;
+ )) == null ? void 0 : _g.name;
  if (webSearchToolName) {
  addInclude("web_search_call.action.sources");
  }
@@ -4429,7 +4424,7 @@ var OpenAIResponsesLanguageModel = class {
  format: responseFormat.schema != null ? {
  type: "json_schema",
  strict: strictJsonSchema,
- name: (_g = responseFormat.name) != null ? _g : "response",
+ name: (_h = responseFormat.name) != null ? _h : "response",
  description: responseFormat.description,
  schema: responseFormat.schema
  } : { type: "json_object" }
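
Below is a minimal, illustrative sketch of how the new option could be supplied, assuming this package follows the AI SDK providerOptions convention under an "openai" key; only the containsApprovalResponses field itself appears in the diff above, so the surrounding object shape is an assumption, not part of the package's documented API.

// Illustrative only: assumed AI SDK-style providerOptions shape; the diff
// above confirms only the `containsApprovalResponses` field.
const providerOptions = {
  openai: {
    // Marks a request that carries tool approval responses. Per the diff,
    // when this is true (and `store` is enabled) assistant tool calls are
    // skipped rather than re-sent as `item_reference` entries.
    // Defaults to `false`.
    containsApprovalResponses: true,
  },
};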