@ai-sdk/openai 2.0.99 → 2.0.101
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +25 -13
- package/dist/index.d.mts +2 -2
- package/dist/index.d.ts +2 -2
- package/dist/index.js +21 -6
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +18 -3
- package/dist/index.mjs.map +1 -1
- package/dist/internal/index.d.mts +2 -2
- package/dist/internal/index.d.ts +2 -2
- package/dist/internal/index.js +20 -5
- package/dist/internal/index.js.map +1 -1
- package/dist/internal/index.mjs +17 -2
- package/dist/internal/index.mjs.map +1 -1
- package/package.json +1 -3
package/dist/index.mjs
CHANGED
|
@@ -42,7 +42,7 @@ var openaiFailedResponseHandler = createJsonErrorResponseHandler({
|
|
|
42
42
|
// src/openai-language-model-capabilities.ts
|
|
43
43
|
function getOpenAILanguageModelCapabilities(modelId) {
|
|
44
44
|
const supportsFlexProcessing = modelId.startsWith("o3") || modelId.startsWith("o4-mini") || modelId.startsWith("gpt-5") && !modelId.startsWith("gpt-5-chat");
|
|
45
|
-
const supportsPriorityProcessing = modelId.startsWith("gpt-4") || modelId.startsWith("gpt-5") && !modelId.startsWith("gpt-5-nano") && !modelId.startsWith("gpt-5-chat") || modelId.startsWith("o3") || modelId.startsWith("o4-mini");
|
|
45
|
+
const supportsPriorityProcessing = modelId.startsWith("gpt-4") || modelId.startsWith("gpt-5") && !modelId.startsWith("gpt-5-nano") && !modelId.startsWith("gpt-5-chat") && !modelId.startsWith("gpt-5.4-nano") || modelId.startsWith("o3") || modelId.startsWith("o4-mini");
|
|
46
46
|
const isReasoningModel = !(modelId.startsWith("gpt-3") || modelId.startsWith("gpt-4") || modelId.startsWith("chatgpt-4o") || modelId.startsWith("gpt-5-chat"));
|
|
47
47
|
const supportsNonReasoningParameters = modelId.startsWith("gpt-5.1") || modelId.startsWith("gpt-5.2") || modelId.startsWith("gpt-5.3") || modelId.startsWith("gpt-5.4");
|
|
48
48
|
const systemMessageMode = isReasoningModel ? "developer" : "system";
|
|
@@ -2211,7 +2211,7 @@ async function convertToOpenAIResponsesInput({
|
|
|
2211
2211
|
hasLocalShellTool = false
|
|
2212
2212
|
}) {
|
|
2213
2213
|
var _a, _b, _c, _d, _e, _f;
|
|
2214
|
-
|
|
2214
|
+
let input = [];
|
|
2215
2215
|
const warnings = [];
|
|
2216
2216
|
for (const { role, content } of prompt) {
|
|
2217
2217
|
switch (role) {
|
|
@@ -2473,6 +2473,17 @@ async function convertToOpenAIResponsesInput({
|
|
|
2473
2473
|
}
|
|
2474
2474
|
}
|
|
2475
2475
|
}
|
|
2476
|
+
if (!store && input.some(
|
|
2477
|
+
(item) => "type" in item && item.type === "reasoning" && item.encrypted_content == null
|
|
2478
|
+
)) {
|
|
2479
|
+
warnings.push({
|
|
2480
|
+
type: "other",
|
|
2481
|
+
message: "Reasoning parts without encrypted content are not supported when store is false. Skipping reasoning parts."
|
|
2482
|
+
});
|
|
2483
|
+
input = input.filter(
|
|
2484
|
+
(item) => !("type" in item) || item.type !== "reasoning" || item.encrypted_content != null
|
|
2485
|
+
);
|
|
2486
|
+
}
|
|
2476
2487
|
return { input, warnings };
|
|
2477
2488
|
}
|
|
2478
2489
|
var openaiResponsesReasoningProviderOptionsSchema = z15.object({
|
|
@@ -3007,6 +3018,10 @@ var openaiResponsesReasoningModelIds = [
|
|
|
3007
3018
|
"gpt-5.3-codex",
|
|
3008
3019
|
"gpt-5.4",
|
|
3009
3020
|
"gpt-5.4-2026-03-05",
|
|
3021
|
+
"gpt-5.4-mini",
|
|
3022
|
+
"gpt-5.4-mini-2026-03-17",
|
|
3023
|
+
"gpt-5.4-nano",
|
|
3024
|
+
"gpt-5.4-nano-2026-03-17",
|
|
3010
3025
|
"gpt-5.4-pro",
|
|
3011
3026
|
"gpt-5.4-pro-2026-03-05"
|
|
3012
3027
|
];
|
|
@@ -4716,7 +4731,7 @@ var OpenAITranscriptionModel = class {
|
|
|
4716
4731
|
};
|
|
4717
4732
|
|
|
4718
4733
|
// src/version.ts
|
|
4719
|
-
var VERSION = true ? "2.0.99" : "0.0.0-test";
|
|
4734
|
+
var VERSION = true ? "2.0.101" : "0.0.0-test";
|
|
4720
4735
|
|
|
4721
4736
|
// src/openai-provider.ts
|
|
4722
4737
|
function createOpenAI(options = {}) {
|