@ai-sdk/openai 2.0.99 → 2.0.101
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +25 -13
- package/dist/index.d.mts +2 -2
- package/dist/index.d.ts +2 -2
- package/dist/index.js +21 -6
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +18 -3
- package/dist/index.mjs.map +1 -1
- package/dist/internal/index.d.mts +2 -2
- package/dist/internal/index.d.ts +2 -2
- package/dist/internal/index.js +20 -5
- package/dist/internal/index.js.map +1 -1
- package/dist/internal/index.mjs +17 -2
- package/dist/internal/index.mjs.map +1 -1
- package/package.json +1 -3
package/dist/internal/index.mjs
CHANGED
|
@@ -34,7 +34,7 @@ var openaiFailedResponseHandler = createJsonErrorResponseHandler({
|
|
|
34
34
|
// src/openai-language-model-capabilities.ts
|
|
35
35
|
function getOpenAILanguageModelCapabilities(modelId) {
|
|
36
36
|
const supportsFlexProcessing = modelId.startsWith("o3") || modelId.startsWith("o4-mini") || modelId.startsWith("gpt-5") && !modelId.startsWith("gpt-5-chat");
|
|
37
|
-
const supportsPriorityProcessing = modelId.startsWith("gpt-4") || modelId.startsWith("gpt-5") && !modelId.startsWith("gpt-5-nano") && !modelId.startsWith("gpt-5-chat") || modelId.startsWith("o3") || modelId.startsWith("o4-mini");
|
|
37
|
+
const supportsPriorityProcessing = modelId.startsWith("gpt-4") || modelId.startsWith("gpt-5") && !modelId.startsWith("gpt-5-nano") && !modelId.startsWith("gpt-5-chat") && !modelId.startsWith("gpt-5.4-nano") || modelId.startsWith("o3") || modelId.startsWith("o4-mini");
|
|
38
38
|
const isReasoningModel = !(modelId.startsWith("gpt-3") || modelId.startsWith("gpt-4") || modelId.startsWith("chatgpt-4o") || modelId.startsWith("gpt-5-chat"));
|
|
39
39
|
const supportsNonReasoningParameters = modelId.startsWith("gpt-5.1") || modelId.startsWith("gpt-5.2") || modelId.startsWith("gpt-5.3") || modelId.startsWith("gpt-5.4");
|
|
40
40
|
const systemMessageMode = isReasoningModel ? "developer" : "system";
|
|
@@ -2248,7 +2248,7 @@ async function convertToOpenAIResponsesInput({
|
|
|
2248
2248
|
hasLocalShellTool = false
|
|
2249
2249
|
}) {
|
|
2250
2250
|
var _a, _b, _c, _d, _e, _f;
|
|
2251
|
-
|
|
2251
|
+
let input = [];
|
|
2252
2252
|
const warnings = [];
|
|
2253
2253
|
for (const { role, content } of prompt) {
|
|
2254
2254
|
switch (role) {
|
|
@@ -2510,6 +2510,17 @@ async function convertToOpenAIResponsesInput({
|
|
|
2510
2510
|
}
|
|
2511
2511
|
}
|
|
2512
2512
|
}
|
|
2513
|
+
if (!store && input.some(
|
|
2514
|
+
(item) => "type" in item && item.type === "reasoning" && item.encrypted_content == null
|
|
2515
|
+
)) {
|
|
2516
|
+
warnings.push({
|
|
2517
|
+
type: "other",
|
|
2518
|
+
message: "Reasoning parts without encrypted content are not supported when store is false. Skipping reasoning parts."
|
|
2519
|
+
});
|
|
2520
|
+
input = input.filter(
|
|
2521
|
+
(item) => !("type" in item) || item.type !== "reasoning" || item.encrypted_content != null
|
|
2522
|
+
);
|
|
2523
|
+
}
|
|
2513
2524
|
return { input, warnings };
|
|
2514
2525
|
}
|
|
2515
2526
|
var openaiResponsesReasoningProviderOptionsSchema = z13.object({
|
|
@@ -3044,6 +3055,10 @@ var openaiResponsesReasoningModelIds = [
|
|
|
3044
3055
|
"gpt-5.3-codex",
|
|
3045
3056
|
"gpt-5.4",
|
|
3046
3057
|
"gpt-5.4-2026-03-05",
|
|
3058
|
+
"gpt-5.4-mini",
|
|
3059
|
+
"gpt-5.4-mini-2026-03-17",
|
|
3060
|
+
"gpt-5.4-nano",
|
|
3061
|
+
"gpt-5.4-nano-2026-03-17",
|
|
3047
3062
|
"gpt-5.4-pro",
|
|
3048
3063
|
"gpt-5.4-pro-2026-03-05"
|
|
3049
3064
|
];
|