@zenning/openai 3.0.5 → 3.0.7

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/CHANGELOG.md CHANGED
@@ -1,5 +1,25 @@
1
1
  # @zenning/openai
2
2
 
3
+ ## 3.0.7
4
+
5
+ ### Patch Changes
6
+
7
+ - Add support for OpenAI Responses API compaction feature via provider options for context window management
8
+ - 10b232c: Fix openai file_search tool to accept optional query param
9
+ - Updated dependencies
10
+ - @zenning/provider@3.0.4
11
+ - @zenning/provider-utils@4.0.6
12
+
13
+ ## 3.0.6
14
+
15
+ ### Patch Changes
16
+
17
+ - Add support for OpenAI compaction message part type in system messages for managing context window limits
18
+ - 10b232c: Fix openai file_search tool to accept optional query param
19
+ - Updated dependencies
20
+ - @zenning/provider@3.0.3
21
+ - @zenning/provider-utils@4.0.5
22
+
3
23
  ## 3.0.5
4
24
 
5
25
  ### Patch Changes
package/dist/index.d.mts CHANGED
@@ -457,6 +457,10 @@ declare const openaiResponsesProviderOptionsSchema: _zenning_provider_utils.Lazy
457
457
  user?: string | null | undefined;
458
458
  systemMessageMode?: "remove" | "system" | "developer" | undefined;
459
459
  forceReasoning?: boolean | undefined;
460
+ compactionInput?: {
461
+ type: "compaction";
462
+ encrypted_content: string;
463
+ }[] | undefined;
460
464
  }>;
461
465
  type OpenAIResponsesProviderOptions = InferSchema<typeof openaiResponsesProviderOptionsSchema>;
462
466
 
package/dist/index.d.ts CHANGED
@@ -457,6 +457,10 @@ declare const openaiResponsesProviderOptionsSchema: _zenning_provider_utils.Lazy
457
457
  user?: string | null | undefined;
458
458
  systemMessageMode?: "remove" | "system" | "developer" | undefined;
459
459
  forceReasoning?: boolean | undefined;
460
+ compactionInput?: {
461
+ type: "compaction";
462
+ encrypted_content: string;
463
+ }[] | undefined;
460
464
  }>;
461
465
  type OpenAIResponsesProviderOptions = InferSchema<typeof openaiResponsesProviderOptionsSchema>;
462
466
 
package/dist/index.js CHANGED
@@ -2477,12 +2477,16 @@ async function convertToOpenAIResponsesInput({
2477
2477
  store,
2478
2478
  hasLocalShellTool = false,
2479
2479
  hasShellTool = false,
2480
- hasApplyPatchTool = false
2480
+ hasApplyPatchTool = false,
2481
+ compactionInput
2481
2482
  }) {
2482
2483
  var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m;
2483
2484
  const input = [];
2484
2485
  const warnings = [];
2485
2486
  const processedApprovalIds = /* @__PURE__ */ new Set();
2487
+ if (compactionInput && compactionInput.length > 0) {
2488
+ input.push(...compactionInput);
2489
+ }
2486
2490
  for (const { role, content } of prompt) {
2487
2491
  switch (role) {
2488
2492
  case "system": {
@@ -3776,7 +3780,18 @@ var openaiResponsesProviderOptionsSchema = (0, import_provider_utils25.lazySchem
3776
3780
  * When enabled, the SDK applies reasoning-model parameter compatibility rules
3777
3781
  * and defaults `systemMessageMode` to `developer` unless overridden.
3778
3782
  */
3779
- forceReasoning: import_v420.z.boolean().optional()
3783
+ forceReasoning: import_v420.z.boolean().optional(),
3784
+ /**
3785
+ * Compaction input items to inject into the request.
3786
+ * These are standalone items from the /responses/compact endpoint that contain
3787
+ * encrypted conversation history for context window management.
3788
+ */
3789
+ compactionInput: import_v420.z.array(
3790
+ import_v420.z.object({
3791
+ type: import_v420.z.literal("compaction"),
3792
+ encrypted_content: import_v420.z.string()
3793
+ })
3794
+ ).optional()
3780
3795
  })
3781
3796
  )
3782
3797
  );
@@ -4069,7 +4084,8 @@ var OpenAIResponsesLanguageModel = class {
4069
4084
  store: (_c = openaiOptions == null ? void 0 : openaiOptions.store) != null ? _c : true,
4070
4085
  hasLocalShellTool: hasOpenAITool("openai.local_shell"),
4071
4086
  hasShellTool: hasOpenAITool("openai.shell"),
4072
- hasApplyPatchTool: hasOpenAITool("openai.apply_patch")
4087
+ hasApplyPatchTool: hasOpenAITool("openai.apply_patch"),
4088
+ compactionInput: openaiOptions == null ? void 0 : openaiOptions.compactionInput
4073
4089
  });
4074
4090
  warnings.push(...inputWarnings);
4075
4091
  const strictJsonSchema = (_d = openaiOptions == null ? void 0 : openaiOptions.strictJsonSchema) != null ? _d : true;
@@ -5755,7 +5771,7 @@ var OpenAITranscriptionModel = class {
5755
5771
  };
5756
5772
 
5757
5773
  // src/version.ts
5758
- var VERSION = true ? "3.0.5" : "0.0.0-test";
5774
+ var VERSION = true ? "3.0.7" : "0.0.0-test";
5759
5775
 
5760
5776
  // src/openai-provider.ts
5761
5777
  function createOpenAI(options = {}) {