@ai-sdk/openai 3.0.0-beta.32 → 3.0.0-beta.33
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +6 -0
- package/dist/index.d.mts +1 -0
- package/dist/index.d.ts +1 -0
- package/dist/index.js +3 -5
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +3 -5
- package/dist/index.mjs.map +1 -1
- package/dist/internal/index.js +2 -4
- package/dist/internal/index.js.map +1 -1
- package/dist/internal/index.mjs +2 -4
- package/dist/internal/index.mjs.map +1 -1
- package/package.json +1 -1
package/dist/internal/index.mjs
CHANGED
@@ -3017,6 +3017,7 @@ var openaiResponsesProviderOptionsSchema = lazySchema13(
       store: z15.boolean().nullish(),
       strictJsonSchema: z15.boolean().nullish(),
       textVerbosity: z15.enum(["low", "medium", "high"]).nullish(),
+      truncation: z15.enum(["auto", "disabled"]).nullish(),
       user: z15.string().nullish()
     })
   )
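The hunk above adds a truncation field to the Responses provider-options schema, restricted to "auto" or "disabled" and allowed to be null or undefined. As a standalone sketch of how that field validates (using plain zod rather than the bundle's z15 / lazySchema13 aliases):

import { z } from 'zod';

// Mirrors the z15.enum(["auto", "disabled"]).nullish() line added above.
const truncation = z.enum(['auto', 'disabled']).nullish();

truncation.parse('auto');      // ok
truncation.parse('disabled');  // ok
truncation.parse(null);        // ok, nullish allows null
truncation.parse(undefined);   // ok, nullish allows undefined
// truncation.parse('never');  // would throw a ZodError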
@@ -3545,6 +3546,7 @@ var OpenAIResponsesLanguageModel = class {
       prompt_cache_key: openaiOptions == null ? void 0 : openaiOptions.promptCacheKey,
       safety_identifier: openaiOptions == null ? void 0 : openaiOptions.safetyIdentifier,
       top_logprobs: topLogprobs,
+      truncation: openaiOptions == null ? void 0 : openaiOptions.truncation,
       // model-specific settings:
       ...modelConfig.isReasoningModel && ((openaiOptions == null ? void 0 : openaiOptions.reasoningEffort) != null || (openaiOptions == null ? void 0 : openaiOptions.reasoningSummary) != null) && {
         reasoning: {
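With the hunk above, a caller-supplied truncation provider option is forwarded as the truncation field of the Responses API request, next to prompt_cache_key, safety_identifier and top_logprobs. A minimal usage sketch, assuming the generateText / providerOptions pattern from the ai package (the model id is only an example):

import { generateText } from 'ai';
import { openai } from '@ai-sdk/openai';

const { text } = await generateText({
  model: openai.responses('gpt-5'),   // Responses API model; id is illustrative
  prompt: 'Summarize the latest release notes.',
  providerOptions: {
    openai: {
      truncation: 'auto', // forwarded to the request body by the code shown above
    },
  },
});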
@@ -3555,9 +3557,6 @@ var OpenAIResponsesLanguageModel = class {
             summary: openaiOptions.reasoningSummary
           }
         }
-      },
-      ...modelConfig.requiredAutoTruncation && {
-        truncation: "auto"
       }
     };
     if (modelConfig.isReasoningModel) {
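The hunk above removes the conditional spread that forced truncation: "auto" whenever the model config set requiredAutoTruncation; truncation is now driven only by the provider option added earlier in this diff. A simplified before/after of the pattern (modelConfig and openaiOptions here are stand-ins, not the package's real objects):

// Stand-in inputs for illustration only.
const modelConfig = { requiredAutoTruncation: true };
const openaiOptions: { truncation?: 'auto' | 'disabled' } = { truncation: 'auto' };

// beta.32 style: truncation injected only when the model config demanded it.
const oldBody = {
  ...(modelConfig.requiredAutoTruncation && { truncation: 'auto' as const }),
};

// beta.33 style: truncation passed through from provider options (undefined when unset).
const newBody = {
  truncation: openaiOptions.truncation,
};

console.log(oldBody, newBody); // { truncation: 'auto' } { truncation: 'auto' }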
@@ -4413,7 +4412,6 @@ function getResponsesModelConfig(modelId) {
   const supportsFlexProcessing2 = modelId.startsWith("o3") || modelId.startsWith("o4-mini") || modelId.startsWith("gpt-5") && !modelId.startsWith("gpt-5-chat");
   const supportsPriorityProcessing2 = modelId.startsWith("gpt-4") || modelId.startsWith("gpt-5-mini") || modelId.startsWith("gpt-5") && !modelId.startsWith("gpt-5-nano") && !modelId.startsWith("gpt-5-chat") || modelId.startsWith("o3") || modelId.startsWith("o4-mini");
   const defaults = {
-    requiredAutoTruncation: false,
     systemMessageMode: "system",
     supportsFlexProcessing: supportsFlexProcessing2,
     supportsPriorityProcessing: supportsPriorityProcessing2
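After the hunk above, the defaults returned by getResponsesModelConfig no longer carry a requiredAutoTruncation flag. A rough sketch of the resulting shape (field names copied from the hunk; the interface name and exact types are assumptions, not something the package exports under this name):

interface ResponsesModelConfigDefaults {
  systemMessageMode: string;          // "system" in the defaults above
  supportsFlexProcessing: boolean;
  supportsPriorityProcessing: boolean;
  // requiredAutoTruncation: boolean  // removed in beta.33
}

const defaults: ResponsesModelConfigDefaults = {
  systemMessageMode: 'system',
  supportsFlexProcessing: true,       // e.g. for an "o3" model id, per the predicate above
  supportsPriorityProcessing: true,
};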