@ai-sdk/openai 2.0.98 → 2.0.100
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +12 -0
- package/dist/index.d.mts +2 -2
- package/dist/index.d.ts +2 -2
- package/dist/index.js +18 -6
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +15 -3
- package/dist/index.mjs.map +1 -1
- package/dist/internal/index.d.mts +2 -2
- package/dist/internal/index.d.ts +2 -2
- package/dist/internal/index.js +17 -5
- package/dist/internal/index.js.map +1 -1
- package/dist/internal/index.mjs +14 -2
- package/dist/internal/index.mjs.map +1 -1
- package/package.json +1 -1
package/dist/index.mjs
CHANGED
|
@@ -44,7 +44,7 @@ function getOpenAILanguageModelCapabilities(modelId) {
|
|
|
44
44
|
const supportsFlexProcessing = modelId.startsWith("o3") || modelId.startsWith("o4-mini") || modelId.startsWith("gpt-5") && !modelId.startsWith("gpt-5-chat");
|
|
45
45
|
const supportsPriorityProcessing = modelId.startsWith("gpt-4") || modelId.startsWith("gpt-5-mini") || modelId.startsWith("gpt-5") && !modelId.startsWith("gpt-5-nano") && !modelId.startsWith("gpt-5-chat") || modelId.startsWith("o3") || modelId.startsWith("o4-mini");
|
|
46
46
|
const isReasoningModel = !(modelId.startsWith("gpt-3") || modelId.startsWith("gpt-4") || modelId.startsWith("chatgpt-4o") || modelId.startsWith("gpt-5-chat"));
|
|
47
|
-
const supportsNonReasoningParameters = modelId.startsWith("gpt-5.1") || modelId.startsWith("gpt-5.2") || modelId.startsWith("gpt-5.4");
|
|
47
|
+
const supportsNonReasoningParameters = modelId.startsWith("gpt-5.1") || modelId.startsWith("gpt-5.2") || modelId.startsWith("gpt-5.3") || modelId.startsWith("gpt-5.4");
|
|
48
48
|
const systemMessageMode = isReasoningModel ? "developer" : "system";
|
|
49
49
|
return {
|
|
50
50
|
supportsFlexProcessing,
|
|
@@ -2211,7 +2211,7 @@ async function convertToOpenAIResponsesInput({
|
|
|
2211
2211
|
hasLocalShellTool = false
|
|
2212
2212
|
}) {
|
|
2213
2213
|
var _a, _b, _c, _d, _e, _f;
|
|
2214
|
-
|
|
2214
|
+
let input = [];
|
|
2215
2215
|
const warnings = [];
|
|
2216
2216
|
for (const { role, content } of prompt) {
|
|
2217
2217
|
switch (role) {
|
|
@@ -2473,6 +2473,17 @@ async function convertToOpenAIResponsesInput({
|
|
|
2473
2473
|
}
|
|
2474
2474
|
}
|
|
2475
2475
|
}
|
|
2476
|
+
if (!store && input.some(
|
|
2477
|
+
(item) => "type" in item && item.type === "reasoning" && item.encrypted_content == null
|
|
2478
|
+
)) {
|
|
2479
|
+
warnings.push({
|
|
2480
|
+
type: "other",
|
|
2481
|
+
message: "Reasoning parts without encrypted content are not supported when store is false. Skipping reasoning parts."
|
|
2482
|
+
});
|
|
2483
|
+
input = input.filter(
|
|
2484
|
+
(item) => !("type" in item) || item.type !== "reasoning" || item.encrypted_content != null
|
|
2485
|
+
);
|
|
2486
|
+
}
|
|
2476
2487
|
return { input, warnings };
|
|
2477
2488
|
}
|
|
2478
2489
|
var openaiResponsesReasoningProviderOptionsSchema = z15.object({
|
|
@@ -3003,6 +3014,7 @@ var openaiResponsesReasoningModelIds = [
|
|
|
3003
3014
|
"gpt-5.2-chat-latest",
|
|
3004
3015
|
"gpt-5.2-pro",
|
|
3005
3016
|
"gpt-5.2-codex",
|
|
3017
|
+
"gpt-5.3-chat-latest",
|
|
3006
3018
|
"gpt-5.3-codex",
|
|
3007
3019
|
"gpt-5.4",
|
|
3008
3020
|
"gpt-5.4-2026-03-05",
|
|
@@ -4715,7 +4727,7 @@ var OpenAITranscriptionModel = class {
|
|
|
4715
4727
|
};
|
|
4716
4728
|
|
|
4717
4729
|
// src/version.ts
|
|
4718
|
-
var VERSION = true ? "2.0.98" : "0.0.0-test";
|
|
4730
|
+
var VERSION = true ? "2.0.100" : "0.0.0-test";
|
|
4719
4731
|
|
|
4720
4732
|
// src/openai-provider.ts
|
|
4721
4733
|
function createOpenAI(options = {}) {
|