@ai-sdk/openai 2.0.52 → 2.0.54
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +13 -0
- package/dist/index.d.mts +1 -0
- package/dist/index.d.ts +1 -0
- package/dist/index.js +3 -5
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +3 -5
- package/dist/index.mjs.map +1 -1
- package/dist/internal/index.js +2 -4
- package/dist/internal/index.js.map +1 -1
- package/dist/internal/index.mjs +2 -4
- package/dist/internal/index.mjs.map +1 -1
- package/package.json +2 -2
package/dist/internal/index.js
CHANGED
@@ -2983,6 +2983,7 @@ var openaiResponsesProviderOptionsSchema = (0, import_provider_utils22.lazyValid
     store: import_v415.z.boolean().nullish(),
     strictJsonSchema: import_v415.z.boolean().nullish(),
     textVerbosity: import_v415.z.enum(["low", "medium", "high"]).nullish(),
+    truncation: import_v415.z.enum(["auto", "disabled"]).nullish(),
     user: import_v415.z.string().nullish()
   })
 )
@@ -3488,6 +3489,7 @@ var OpenAIResponsesLanguageModel = class {
       prompt_cache_key: openaiOptions == null ? void 0 : openaiOptions.promptCacheKey,
       safety_identifier: openaiOptions == null ? void 0 : openaiOptions.safetyIdentifier,
       top_logprobs: topLogprobs,
+      truncation: openaiOptions == null ? void 0 : openaiOptions.truncation,
       // model-specific settings:
       ...modelConfig.isReasoningModel && ((openaiOptions == null ? void 0 : openaiOptions.reasoningEffort) != null || (openaiOptions == null ? void 0 : openaiOptions.reasoningSummary) != null) && {
         reasoning: {
@@ -3498,9 +3500,6 @@ var OpenAIResponsesLanguageModel = class {
             summary: openaiOptions.reasoningSummary
           }
         }
-      },
-      ...modelConfig.requiredAutoTruncation && {
-        truncation: "auto"
       }
     };
     if (modelConfig.isReasoningModel) {
@@ -4342,7 +4341,6 @@ function getResponsesModelConfig(modelId) {
   const supportsFlexProcessing2 = modelId.startsWith("o3") || modelId.startsWith("o4-mini") || modelId.startsWith("gpt-5") && !modelId.startsWith("gpt-5-chat");
   const supportsPriorityProcessing2 = modelId.startsWith("gpt-4") || modelId.startsWith("gpt-5-mini") || modelId.startsWith("gpt-5") && !modelId.startsWith("gpt-5-nano") && !modelId.startsWith("gpt-5-chat") || modelId.startsWith("o3") || modelId.startsWith("o4-mini");
   const defaults = {
-    requiredAutoTruncation: false,
     systemMessageMode: "system",
     supportsFlexProcessing: supportsFlexProcessing2,
     supportsPriorityProcessing: supportsPriorityProcessing2
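
Net effect of the hunks above: the OpenAI Responses provider gains a user-facing truncation option ("auto" | "disabled"), forwarded as truncation in the request body, while the internal requiredAutoTruncation model flag and its forced truncation: "auto" default are removed. A minimal usage sketch, assuming the generateText helper from the ai package and the provider's responses model factory; the model id and prompt are illustrative, not taken from this diff:

import { generateText } from 'ai';
import { openai } from '@ai-sdk/openai';

const { text } = await generateText({
  // Responses API model; any model id supported by your account works here.
  model: openai.responses('gpt-5-mini'),
  prompt: 'Summarize the conversation so far.',
  providerOptions: {
    openai: {
      // Provider option added in this diff; sent as "truncation" in the request.
      truncation: 'auto', // or 'disabled'
    },
  },
});

Callers that previously relied on the requiredAutoTruncation default (which forced truncation: "auto" for specific models) would now opt in explicitly through this provider option.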