@ai-sdk/openai 2.0.77 → 2.0.79
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +12 -0
- package/dist/index.js +70 -111
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +70 -111
- package/dist/index.mjs.map +1 -1
- package/dist/internal/index.js +69 -110
- package/dist/internal/index.js.map +1 -1
- package/dist/internal/index.mjs +69 -110
- package/dist/internal/index.mjs.map +1 -1
- package/package.json +3 -3
package/dist/internal/index.mjs
CHANGED
@@ -31,6 +31,22 @@ var openaiFailedResponseHandler = createJsonErrorResponseHandler({
   errorToMessage: (data) => data.error.message
 });
 
+// src/openai-language-model-capabilities.ts
+function getOpenAILanguageModelCapabilities(modelId) {
+  const supportsFlexProcessing = modelId.startsWith("o3") || modelId.startsWith("o4-mini") || modelId.startsWith("gpt-5") && !modelId.startsWith("gpt-5-chat");
+  const supportsPriorityProcessing = modelId.startsWith("gpt-4") || modelId.startsWith("gpt-5-mini") || modelId.startsWith("gpt-5") && !modelId.startsWith("gpt-5-nano") && !modelId.startsWith("gpt-5-chat") || modelId.startsWith("o3") || modelId.startsWith("o4-mini");
+  const isReasoningModel = !(modelId.startsWith("gpt-3") || modelId.startsWith("gpt-4") || modelId.startsWith("chatgpt-4o") || modelId.startsWith("gpt-5-chat"));
+  const supportsNonReasoningParameters = modelId.startsWith("gpt-5.1");
+  const systemMessageMode = isReasoningModel ? "developer" : "system";
+  return {
+    supportsFlexProcessing,
+    supportsPriorityProcessing,
+    isReasoningModel,
+    systemMessageMode,
+    supportsNonReasoningParameters
+  };
+}
+
 // src/chat/convert-to-openai-chat-messages.ts
 import {
   UnsupportedFunctionalityError
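The new src/openai-language-model-capabilities.ts module replaces the per-file capability helpers with a single prefix-based lookup shared by the chat and responses models. An illustrative sketch of what it returns for two representative model IDs (not part of the diff; derived from the logic above):

getOpenAILanguageModelCapabilities("o4-mini");
// → { supportsFlexProcessing: true, supportsPriorityProcessing: true,
//     isReasoningModel: true, systemMessageMode: "developer",
//     supportsNonReasoningParameters: false }

getOpenAILanguageModelCapabilities("gpt-5-chat-latest");
// → { supportsFlexProcessing: false, supportsPriorityProcessing: false,
//     isReasoningModel: false, systemMessageMode: "system",
//     supportsNonReasoningParameters: false }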
@@ -590,6 +606,7 @@ var OpenAIChatLanguageModel = class {
       schema: openaiChatLanguageModelOptions
     })) != null ? _a : {};
     const structuredOutputs = (_b = openaiOptions.structuredOutputs) != null ? _b : true;
+    const modelCapabilities = getOpenAILanguageModelCapabilities(this.modelId);
     if (topK != null) {
       warnings.push({
         type: "unsupported-setting",
@@ -606,7 +623,7 @@ var OpenAIChatLanguageModel = class {
     const { messages, warnings: messageWarnings } = convertToOpenAIChatMessages(
       {
         prompt,
-        systemMessageMode: getSystemMessageMode(this.modelId)
+        systemMessageMode: modelCapabilities.systemMessageMode
       }
     );
     warnings.push(...messageWarnings);
@@ -652,22 +669,31 @@ var OpenAIChatLanguageModel = class {
       // messages:
       messages
     };
-    if (isReasoningModel(this.modelId)) {
-      if (baseArgs.temperature != null) {
-        baseArgs.temperature = void 0;
-        warnings.push({
-          type: "unsupported-setting",
-          setting: "temperature",
-          details: "temperature is not supported for reasoning models"
-        });
-      }
-      if (baseArgs.top_p != null) {
-        baseArgs.top_p = void 0;
-        warnings.push({
-          type: "unsupported-setting",
-          setting: "topP",
-          details: "topP is not supported for reasoning models"
-        });
+    if (modelCapabilities.isReasoningModel) {
+      if (openaiOptions.reasoningEffort !== "none" || !modelCapabilities.supportsNonReasoningParameters) {
+        if (baseArgs.temperature != null) {
+          baseArgs.temperature = void 0;
+          warnings.push({
+            type: "unsupported-setting",
+            setting: "temperature",
+            details: "temperature is not supported for reasoning models"
+          });
+        }
+        if (baseArgs.top_p != null) {
+          baseArgs.top_p = void 0;
+          warnings.push({
+            type: "unsupported-setting",
+            setting: "topP",
+            details: "topP is not supported for reasoning models"
+          });
+        }
+        if (baseArgs.logprobs != null) {
+          baseArgs.logprobs = void 0;
+          warnings.push({
+            type: "other",
+            message: "logprobs is not supported for reasoning models"
+          });
+        }
       }
       if (baseArgs.frequency_penalty != null) {
         baseArgs.frequency_penalty = void 0;
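The reasoning-model parameter stripping is now gated on the new supportsNonReasoningParameters flag: on gpt-5.1, setting reasoningEffort to "none" keeps temperature, topP, and logprobs in the request instead of dropping them with warnings. A hedged usage sketch (the generateText/providerOptions call shape is the AI SDK's, and "none" as an accepted reasoningEffort value is assumed from this diff):

import { generateText } from "ai";
import { openai } from "@ai-sdk/openai";

// On gpt-5.1 with reasoningEffort "none", temperature is expected to be
// forwarded; on other reasoning models it would still be removed and an
// "unsupported-setting" warning pushed instead.
const { text, warnings } = await generateText({
  model: openai("gpt-5.1"),
  temperature: 0.3,
  providerOptions: { openai: { reasoningEffort: "none" } },
  prompt: "Summarize this release in one sentence.",
});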
@@ -692,13 +718,6 @@ var OpenAIChatLanguageModel = class {
           message: "logitBias is not supported for reasoning models"
         });
       }
-      if (baseArgs.logprobs != null) {
-        baseArgs.logprobs = void 0;
-        warnings.push({
-          type: "other",
-          message: "logprobs is not supported for reasoning models"
-        });
-      }
       if (baseArgs.top_logprobs != null) {
         baseArgs.top_logprobs = void 0;
         warnings.push({
@@ -722,7 +741,7 @@ var OpenAIChatLanguageModel = class {
         });
       }
     }
-    if (openaiOptions.serviceTier === "flex" && !supportsFlexProcessing(this.modelId)) {
+    if (openaiOptions.serviceTier === "flex" && !modelCapabilities.supportsFlexProcessing) {
       warnings.push({
         type: "unsupported-setting",
         setting: "serviceTier",
@@ -730,7 +749,7 @@ var OpenAIChatLanguageModel = class {
       });
       baseArgs.service_tier = void 0;
     }
-    if (openaiOptions.serviceTier === "priority" && !supportsPriorityProcessing(this.modelId)) {
+    if (openaiOptions.serviceTier === "priority" && !modelCapabilities.supportsPriorityProcessing) {
       warnings.push({
         type: "unsupported-setting",
         setting: "serviceTier",
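Both serviceTier checks now read from the shared capabilities object instead of the standalone helpers. Illustrative only (derived from the prefix rules above): why a flex request would be rejected but a priority request kept for a gpt-4.1 model ID:

const caps = getOpenAILanguageModelCapabilities("gpt-4.1");
caps.supportsFlexProcessing;     // false → serviceTier: "flex" is dropped with an "unsupported-setting" warning
caps.supportsPriorityProcessing; // true  → serviceTier: "priority" is passed through as service_tier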
@@ -1051,42 +1070,6 @@ var OpenAIChatLanguageModel = class {
     };
   }
 };
-function isReasoningModel(modelId) {
-  return (modelId.startsWith("o") || modelId.startsWith("gpt-5")) && !modelId.startsWith("gpt-5-chat");
-}
-function supportsFlexProcessing(modelId) {
-  return modelId.startsWith("o3") || modelId.startsWith("o4-mini") || modelId.startsWith("gpt-5") && !modelId.startsWith("gpt-5-chat");
-}
-function supportsPriorityProcessing(modelId) {
-  return modelId.startsWith("gpt-4") || modelId.startsWith("gpt-5-mini") || modelId.startsWith("gpt-5") && !modelId.startsWith("gpt-5-nano") && !modelId.startsWith("gpt-5-chat") || modelId.startsWith("o3") || modelId.startsWith("o4-mini");
-}
-function getSystemMessageMode(modelId) {
-  var _a, _b;
-  if (!isReasoningModel(modelId)) {
-    return "system";
-  }
-  return (_b = (_a = reasoningModels[modelId]) == null ? void 0 : _a.systemMessageMode) != null ? _b : "developer";
-}
-var reasoningModels = {
-  o3: {
-    systemMessageMode: "developer"
-  },
-  "o3-2025-04-16": {
-    systemMessageMode: "developer"
-  },
-  "o3-mini": {
-    systemMessageMode: "developer"
-  },
-  "o3-mini-2025-01-31": {
-    systemMessageMode: "developer"
-  },
-  "o4-mini": {
-    systemMessageMode: "developer"
-  },
-  "o4-mini-2025-04-16": {
-    systemMessageMode: "developer"
-  }
-};
 
 // src/completion/openai-completion-language-model.ts
 import {
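The table-driven getSystemMessageMode lookup is gone; system-message mode now follows directly from isReasoningModel. For the IDs the removed reasoningModels table listed, the prefix rule yields the same result, as this illustrative check (assuming the new helper) shows:

["o3", "o3-mini", "o3-mini-2025-01-31", "o4-mini", "o4-mini-2025-04-16"].every(
  (id) => getOpenAILanguageModelCapabilities(id).systemMessageMode === "developer"
); // → true, matching the old table's "developer" entries

getOpenAILanguageModelCapabilities("gpt-4o").systemMessageMode; // → "system", as before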
@@ -3529,7 +3512,7 @@ var OpenAIResponsesLanguageModel = class {
   }) {
     var _a, _b, _c, _d;
     const warnings = [];
-    const modelConfig = getResponsesModelConfig(this.modelId);
+    const modelCapabilities = getOpenAILanguageModelCapabilities(this.modelId);
     if (topK != null) {
       warnings.push({ type: "unsupported-setting", setting: "topK" });
     }
@@ -3565,7 +3548,7 @@ var OpenAIResponsesLanguageModel = class {
     }
     const { input, warnings: inputWarnings } = await convertToOpenAIResponsesInput({
       prompt,
-      systemMessageMode: modelConfig.systemMessageMode,
+      systemMessageMode: modelCapabilities.systemMessageMode,
       fileIdPrefixes: this.config.fileIdPrefixes,
       store: (_a = openaiOptions == null ? void 0 : openaiOptions.store) != null ? _a : true,
       hasLocalShellTool: hasOpenAITool("openai.local_shell")
@@ -3599,7 +3582,7 @@ var OpenAIResponsesLanguageModel = class {
       addInclude("code_interpreter_call.outputs");
     }
     const store = openaiOptions == null ? void 0 : openaiOptions.store;
-    if (store === false && modelConfig.isReasoningModel) {
+    if (store === false && modelCapabilities.isReasoningModel) {
       addInclude("reasoning.encrypted_content");
     }
     const baseArgs = {
@@ -3641,7 +3624,7 @@ var OpenAIResponsesLanguageModel = class {
       top_logprobs: topLogprobs,
       truncation: openaiOptions == null ? void 0 : openaiOptions.truncation,
       // model-specific settings:
-      ...
+      ...modelCapabilities.isReasoningModel && ((openaiOptions == null ? void 0 : openaiOptions.reasoningEffort) != null || (openaiOptions == null ? void 0 : openaiOptions.reasoningSummary) != null) && {
         reasoning: {
           ...(openaiOptions == null ? void 0 : openaiOptions.reasoningEffort) != null && {
             effort: openaiOptions.reasoningEffort
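The reasoning request block is still only attached for reasoning models when an effort or summary is actually configured. A hedged sketch of provider options that would populate it (option names come from the diff; the openai.responses/generateText call shape is assumed):

import { generateText } from "ai";
import { openai } from "@ai-sdk/openai";

// Expected to translate into `reasoning: { effort: "low", summary: "auto" }`
// on the Responses API request body for a reasoning model like o4-mini.
await generateText({
  model: openai.responses("o4-mini"),
  providerOptions: {
    openai: { reasoningEffort: "low", reasoningSummary: "auto" },
  },
  prompt: "Plan a three-step refactor.",
});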
@@ -3652,22 +3635,24 @@ var OpenAIResponsesLanguageModel = class {
         }
       }
     };
-    if (modelConfig.isReasoningModel) {
-      if (baseArgs.temperature != null) {
-        baseArgs.temperature = void 0;
-        warnings.push({
-          type: "unsupported-setting",
-          setting: "temperature",
-          details: "temperature is not supported for reasoning models"
-        });
-      }
-      if (baseArgs.top_p != null) {
-        baseArgs.top_p = void 0;
-        warnings.push({
-          type: "unsupported-setting",
-          setting: "topP",
-          details: "topP is not supported for reasoning models"
-        });
+    if (modelCapabilities.isReasoningModel) {
+      if (!((openaiOptions == null ? void 0 : openaiOptions.reasoningEffort) === "none" && modelCapabilities.supportsNonReasoningParameters)) {
+        if (baseArgs.temperature != null) {
+          baseArgs.temperature = void 0;
+          warnings.push({
+            type: "unsupported-setting",
+            setting: "temperature",
+            details: "temperature is not supported for reasoning models"
+          });
+        }
+        if (baseArgs.top_p != null) {
+          baseArgs.top_p = void 0;
+          warnings.push({
+            type: "unsupported-setting",
+            setting: "topP",
+            details: "topP is not supported for reasoning models"
+          });
+        }
       }
     } else {
       if ((openaiOptions == null ? void 0 : openaiOptions.reasoningEffort) != null) {
@@ -3685,7 +3670,7 @@ var OpenAIResponsesLanguageModel = class {
         });
       }
     }
-    if ((openaiOptions == null ? void 0 : openaiOptions.serviceTier) === "flex" && !modelConfig.supportsFlexProcessing) {
+    if ((openaiOptions == null ? void 0 : openaiOptions.serviceTier) === "flex" && !modelCapabilities.supportsFlexProcessing) {
       warnings.push({
         type: "unsupported-setting",
         setting: "serviceTier",
@@ -3693,7 +3678,7 @@ var OpenAIResponsesLanguageModel = class {
       });
       delete baseArgs.service_tier;
     }
-    if ((openaiOptions == null ? void 0 : openaiOptions.serviceTier) === "priority" && !modelConfig.supportsPriorityProcessing) {
+    if ((openaiOptions == null ? void 0 : openaiOptions.serviceTier) === "priority" && !modelCapabilities.supportsPriorityProcessing) {
       warnings.push({
         type: "unsupported-setting",
         setting: "serviceTier",
@@ -4550,32 +4535,6 @@ function isResponseAnnotationAddedChunk(chunk) {
 function isErrorChunk(chunk) {
   return chunk.type === "error";
 }
-function getResponsesModelConfig(modelId) {
-  const supportsFlexProcessing2 = modelId.startsWith("o3") || modelId.startsWith("o4-mini") || modelId.startsWith("gpt-5") && !modelId.startsWith("gpt-5-chat");
-  const supportsPriorityProcessing2 = modelId.startsWith("gpt-4") || modelId.startsWith("gpt-5-mini") || modelId.startsWith("gpt-5") && !modelId.startsWith("gpt-5-nano") && !modelId.startsWith("gpt-5-chat") || modelId.startsWith("o3") || modelId.startsWith("o4-mini");
-  const defaults = {
-    systemMessageMode: "system",
-    supportsFlexProcessing: supportsFlexProcessing2,
-    supportsPriorityProcessing: supportsPriorityProcessing2
-  };
-  if (modelId.startsWith("gpt-5-chat")) {
-    return {
-      ...defaults,
-      isReasoningModel: false
-    };
-  }
-  if (modelId.startsWith("o") || modelId.startsWith("gpt-5") || modelId.startsWith("codex-") || modelId.startsWith("computer-use")) {
-    return {
-      ...defaults,
-      isReasoningModel: true,
-      systemMessageMode: "developer"
-    };
-  }
-  return {
-    ...defaults,
-    isReasoningModel: false
-  };
-}
 function mapWebSearchOutput(action) {
   var _a;
   switch (action.type) {
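With getResponsesModelConfig removed, the chat and responses models consult the same helper. The old responses config special-cased the codex- and computer-use prefixes as reasoning models; under the new negative-list rule those IDs still resolve the same way, e.g. (illustrative):

const caps = getOpenAILanguageModelCapabilities("codex-mini-latest");
caps.isReasoningModel;  // → true, same as the removed responses-only config
caps.systemMessageMode; // → "developer"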