@ai-sdk/openai 2.0.52 → 2.0.53

This diff reflects the changes between publicly released versions of the package as they appear in their public registry, and is provided for informational purposes only.
package/CHANGELOG.md CHANGED
@@ -1,5 +1,11 @@
  # @ai-sdk/openai

+ ## 2.0.53
+
+ ### Patch Changes
+
+ - 5464bf0: fix(provider/openai): add truncation parameter support for Responses API
+
  ## 2.0.52

  ### Patch Changes
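For context on the changelog entry above: the Responses provider options now accept a `truncation` value (`"auto"` or `"disabled"`, see the type and schema diffs below). A minimal usage sketch, assuming the AI SDK v5 `generateText` API and the truncation semantics documented by OpenAI; the model id and prompt are placeholders:

```ts
import { generateText } from 'ai';
import { openai } from '@ai-sdk/openai';

const { text } = await generateText({
  model: openai.responses('gpt-5-mini'), // placeholder model id
  prompt: 'Summarize the conversation so far.',
  providerOptions: {
    openai: {
      // new in 2.0.53: 'auto' lets the Responses API drop items from the middle
      // of the conversation when the context window is exceeded; 'disabled'
      // (the API default) fails such requests instead.
      truncation: 'auto',
    },
  },
});

console.log(text);
```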
package/dist/index.d.mts CHANGED
@@ -322,6 +322,7 @@ declare const openaiResponsesProviderOptionsSchema: _ai_sdk_provider_utils.LazyV
  store?: boolean | null | undefined;
  strictJsonSchema?: boolean | null | undefined;
  textVerbosity?: "low" | "medium" | "high" | null | undefined;
+ truncation?: "auto" | "disabled" | null | undefined;
  user?: string | null | undefined;
  }>;
  type OpenAIResponsesProviderOptions = InferValidator<typeof openaiResponsesProviderOptionsSchema>;
package/dist/index.d.ts CHANGED
@@ -322,6 +322,7 @@ declare const openaiResponsesProviderOptionsSchema: _ai_sdk_provider_utils.LazyV
  store?: boolean | null | undefined;
  strictJsonSchema?: boolean | null | undefined;
  textVerbosity?: "low" | "medium" | "high" | null | undefined;
+ truncation?: "auto" | "disabled" | null | undefined;
  user?: string | null | undefined;
  }>;
  type OpenAIResponsesProviderOptions = InferValidator<typeof openaiResponsesProviderOptionsSchema>;
package/dist/index.js CHANGED
@@ -2910,6 +2910,7 @@ var openaiResponsesProviderOptionsSchema = (0, import_provider_utils22.lazyValid
  store: import_v417.z.boolean().nullish(),
  strictJsonSchema: import_v417.z.boolean().nullish(),
  textVerbosity: import_v417.z.enum(["low", "medium", "high"]).nullish(),
+ truncation: import_v417.z.enum(["auto", "disabled"]).nullish(),
  user: import_v417.z.string().nullish()
  })
  )
@@ -3187,6 +3188,7 @@ var OpenAIResponsesLanguageModel = class {
  prompt_cache_key: openaiOptions == null ? void 0 : openaiOptions.promptCacheKey,
  safety_identifier: openaiOptions == null ? void 0 : openaiOptions.safetyIdentifier,
  top_logprobs: topLogprobs,
+ truncation: openaiOptions == null ? void 0 : openaiOptions.truncation,
  // model-specific settings:
  ...modelConfig.isReasoningModel && ((openaiOptions == null ? void 0 : openaiOptions.reasoningEffort) != null || (openaiOptions == null ? void 0 : openaiOptions.reasoningSummary) != null) && {
  reasoning: {
@@ -3197,9 +3199,6 @@ var OpenAIResponsesLanguageModel = class {
  summary: openaiOptions.reasoningSummary
  }
  }
- },
- ...modelConfig.requiredAutoTruncation && {
- truncation: "auto"
  }
  };
  if (modelConfig.isReasoningModel) {
@@ -4041,7 +4040,6 @@ function getResponsesModelConfig(modelId) {
  const supportsFlexProcessing2 = modelId.startsWith("o3") || modelId.startsWith("o4-mini") || modelId.startsWith("gpt-5") && !modelId.startsWith("gpt-5-chat");
  const supportsPriorityProcessing2 = modelId.startsWith("gpt-4") || modelId.startsWith("gpt-5-mini") || modelId.startsWith("gpt-5") && !modelId.startsWith("gpt-5-nano") && !modelId.startsWith("gpt-5-chat") || modelId.startsWith("o3") || modelId.startsWith("o4-mini");
  const defaults = {
- requiredAutoTruncation: false,
  systemMessageMode: "system",
  supportsFlexProcessing: supportsFlexProcessing2,
  supportsPriorityProcessing: supportsPriorityProcessing2
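The two hunks above also drop the internal `requiredAutoTruncation` flag: the spread that forced `truncation: "auto"` for flagged models is gone, and the value now comes from provider options, omitted when unset. A small hypothetical sketch (not the provider's actual request builder) of why an unset option leaves the request body unchanged:

```ts
// Hypothetical request-body fragment: `undefined` fields are dropped by
// JSON.stringify, so an unset truncation option never reaches the API and
// the Responses API's own default applies.
const openaiOptions: { truncation?: 'auto' | 'disabled' } = {};

const body = {
  model: 'gpt-5-mini', // placeholder model id
  input: 'Hello',
  truncation: openaiOptions?.truncation, // undefined when the caller did not set it
};

console.log(JSON.stringify(body));
// -> {"model":"gpt-5-mini","input":"Hello"}
```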
@@ -4438,7 +4436,7 @@ var OpenAITranscriptionModel = class {
  };

  // src/version.ts
- var VERSION = true ? "2.0.52" : "0.0.0-test";
+ var VERSION = true ? "2.0.53" : "0.0.0-test";

  // src/openai-provider.ts
  function createOpenAI(options = {}) {