@ai-sdk/openai 3.0.0-beta.84 → 3.0.0-beta.85
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +6 -0
- package/dist/index.js +51 -62
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +51 -62
- package/dist/index.mjs.map +1 -1
- package/dist/internal/index.js +50 -61
- package/dist/internal/index.js.map +1 -1
- package/dist/internal/index.mjs +50 -61
- package/dist/internal/index.mjs.map +1 -1
- package/package.json +1 -1
package/CHANGELOG.md
CHANGED
package/dist/index.js
CHANGED
@@ -52,13 +52,20 @@ var openaiFailedResponseHandler = (0, import_provider_utils.createJsonErrorRespo
   errorToMessage: (data) => data.error.message
 });
 
-// src/openai-
-function
-
-
-
-
-
+// src/openai-language-model-capabilities.ts
+function getOpenAILanguageModelCapabilities(modelId) {
+  const supportsFlexProcessing = modelId.startsWith("o3") || modelId.startsWith("o4-mini") || modelId.startsWith("gpt-5") && !modelId.startsWith("gpt-5-chat");
+  const supportsPriorityProcessing = modelId.startsWith("gpt-4") || modelId.startsWith("gpt-5-mini") || modelId.startsWith("gpt-5") && !modelId.startsWith("gpt-5-nano") && !modelId.startsWith("gpt-5-chat") || modelId.startsWith("o3") || modelId.startsWith("o4-mini");
+  const isReasoningModel = !(modelId.startsWith("gpt-3") || modelId.startsWith("gpt-4") || modelId.startsWith("chatgpt-4o") || modelId.startsWith("gpt-5-chat"));
+  const supportsNonReasoningParameters = modelId.startsWith("gpt-5.1");
+  const systemMessageMode = isReasoningModel ? "developer" : "system";
+  return {
+    supportsFlexProcessing,
+    supportsPriorityProcessing,
+    isReasoningModel,
+    systemMessageMode,
+    supportsNonReasoningParameters
+  };
 }
 
 // src/chat/convert-to-openai-chat-messages.ts
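The hunk above introduces a single capability lookup that replaces several scattered per-model checks (removed further down in this diff). A minimal TypeScript sketch of how the flags fall out for a few representative model IDs; the standalone function, type, and sample IDs below are illustrative and only mirror the added dist code rather than calling the package's (non-exported) internals:

```ts
// Sketch: capability flags derived from model ID prefixes, mirroring the
// added getOpenAILanguageModelCapabilities logic. Names here are illustrative.
type Capabilities = {
  supportsFlexProcessing: boolean;
  supportsPriorityProcessing: boolean;
  isReasoningModel: boolean;
  systemMessageMode: "developer" | "system";
  supportsNonReasoningParameters: boolean;
};

function capabilities(modelId: string): Capabilities {
  const supportsFlexProcessing =
    modelId.startsWith("o3") ||
    modelId.startsWith("o4-mini") ||
    (modelId.startsWith("gpt-5") && !modelId.startsWith("gpt-5-chat"));
  const supportsPriorityProcessing =
    modelId.startsWith("gpt-4") ||
    modelId.startsWith("gpt-5-mini") ||
    (modelId.startsWith("gpt-5") &&
      !modelId.startsWith("gpt-5-nano") &&
      !modelId.startsWith("gpt-5-chat")) ||
    modelId.startsWith("o3") ||
    modelId.startsWith("o4-mini");
  const isReasoningModel = !(
    modelId.startsWith("gpt-3") ||
    modelId.startsWith("gpt-4") ||
    modelId.startsWith("chatgpt-4o") ||
    modelId.startsWith("gpt-5-chat")
  );
  return {
    supportsFlexProcessing,
    supportsPriorityProcessing,
    isReasoningModel,
    systemMessageMode: isReasoningModel ? "developer" : "system",
    supportsNonReasoningParameters: modelId.startsWith("gpt-5.1"),
  };
}

// capabilities("gpt-4o").isReasoningModel === false   → system prompts stay "system"
// capabilities("o3-mini").supportsFlexProcessing === true
// capabilities("gpt-5.1").supportsNonReasoningParameters === true
```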
@@ -608,13 +615,14 @@ var OpenAIChatLanguageModel = class {
       providerOptions,
       schema: openaiChatLanguageModelOptions
     })) != null ? _a : {};
+    const modelCapabilities = getOpenAILanguageModelCapabilities(this.modelId);
     if (topK != null) {
       warnings.push({ type: "unsupported", feature: "topK" });
     }
     const { messages, warnings: messageWarnings } = convertToOpenAIChatMessages(
       {
         prompt,
-        systemMessageMode:
+        systemMessageMode: modelCapabilities.systemMessageMode
       }
     );
     warnings.push(...messageWarnings);
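systemMessageMode decides how a system prompt is encoded when messages are converted for the API: reasoning models get a "developer" role message, other models keep "system". A simplified sketch of that mapping; the message shape is reduced for illustration and the real conversion lives in convertToOpenAIChatMessages:

```ts
// Sketch: encode a system prompt according to systemMessageMode.
// "developer" is used for reasoning models, "system" otherwise.
type SystemMessageMode = "developer" | "system";

function encodeSystemPrompt(content: string, mode: SystemMessageMode) {
  return { role: mode, content };
}

// encodeSystemPrompt("Be terse.", "developer") → { role: "developer", content: "Be terse." }
// encodeSystemPrompt("Be terse.", "system")    → { role: "system", content: "Be terse." }
```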
@@ -660,22 +668,31 @@ var OpenAIChatLanguageModel = class {
       // messages:
       messages
     };
-    if (isReasoningModel
-      if (
-        baseArgs.temperature
-
-
-
-
-
-
-
-        baseArgs.top_p
-
-
-
-
-
+    if (modelCapabilities.isReasoningModel) {
+      if (openaiOptions.reasoningEffort !== "none" || !modelCapabilities.supportsNonReasoningParameters) {
+        if (baseArgs.temperature != null) {
+          baseArgs.temperature = void 0;
+          warnings.push({
+            type: "unsupported",
+            feature: "temperature",
+            details: "temperature is not supported for reasoning models"
+          });
+        }
+        if (baseArgs.top_p != null) {
+          baseArgs.top_p = void 0;
+          warnings.push({
+            type: "unsupported",
+            feature: "topP",
+            details: "topP is not supported for reasoning models"
+          });
+        }
+        if (baseArgs.logprobs != null) {
+          baseArgs.logprobs = void 0;
+          warnings.push({
+            type: "other",
+            message: "logprobs is not supported for reasoning models"
+          });
+        }
       }
       if (baseArgs.frequency_penalty != null) {
         baseArgs.frequency_penalty = void 0;
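Net effect of the rewritten block: for reasoning models, temperature, topP, and logprobs are cleared and reported as warnings instead of being sent, with one carve-out for models that support non-reasoning parameters (gpt-5.1-prefixed) when they are called with reasoningEffort set to "none". A hedged, standalone restatement of that gate; the names below are illustrative and not the package's internal API:

```ts
// Sketch: strip sampling parameters that reasoning models reject, mirroring
// the gate added in the chat model above.
interface CallArgs {
  temperature?: number;
  top_p?: number;
  logprobs?: boolean;
}

interface Warning {
  type: "unsupported" | "other";
  feature?: string;
  message?: string;
  details?: string;
}

function stripReasoningUnsupported(
  args: CallArgs,
  caps: { isReasoningModel: boolean; supportsNonReasoningParameters: boolean },
  reasoningEffort: string | undefined,
  warnings: Warning[],
): CallArgs {
  // Non-reasoning models, or gpt-5.1-style models explicitly run with
  // reasoningEffort "none", keep their sampling parameters untouched.
  if (!caps.isReasoningModel || (reasoningEffort === "none" && caps.supportsNonReasoningParameters)) {
    return args;
  }
  const out = { ...args };
  if (out.temperature != null) {
    out.temperature = undefined;
    warnings.push({ type: "unsupported", feature: "temperature", details: "temperature is not supported for reasoning models" });
  }
  if (out.top_p != null) {
    out.top_p = undefined;
    warnings.push({ type: "unsupported", feature: "topP", details: "topP is not supported for reasoning models" });
  }
  if (out.logprobs != null) {
    out.logprobs = undefined;
    warnings.push({ type: "other", message: "logprobs is not supported for reasoning models" });
  }
  return out;
}
```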
@@ -700,13 +717,6 @@ var OpenAIChatLanguageModel = class {
           message: "logitBias is not supported for reasoning models"
         });
       }
-      if (baseArgs.logprobs != null) {
-        baseArgs.logprobs = void 0;
-        warnings.push({
-          type: "other",
-          message: "logprobs is not supported for reasoning models"
-        });
-      }
       if (baseArgs.top_logprobs != null) {
         baseArgs.top_logprobs = void 0;
         warnings.push({
@@ -730,7 +740,7 @@ var OpenAIChatLanguageModel = class {
         });
       }
     }
-    if (openaiOptions.serviceTier === "flex" && !supportsFlexProcessing
+    if (openaiOptions.serviceTier === "flex" && !modelCapabilities.supportsFlexProcessing) {
       warnings.push({
         type: "unsupported",
         feature: "serviceTier",
@@ -738,7 +748,7 @@ var OpenAIChatLanguageModel = class {
       });
       baseArgs.service_tier = void 0;
     }
-    if (openaiOptions.serviceTier === "priority" && !supportsPriorityProcessing
+    if (openaiOptions.serviceTier === "priority" && !modelCapabilities.supportsPriorityProcessing) {
       warnings.push({
         type: "unsupported",
         feature: "serviceTier",
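Both serviceTier checks now read from the shared capabilities object: if the model does not qualify for flex or priority processing, service_tier is cleared and a warning is emitted rather than the request failing. A hedged usage sketch via the AI SDK's providerOptions; the generateText wiring is the usual SDK convention and the model ID is only an example:

```ts
import { openai } from "@ai-sdk/openai";
import { generateText } from "ai";

// Request flex processing. If the chosen model does not qualify
// (per supportsFlexProcessing above), service_tier is dropped and an
// "unsupported serviceTier" warning is returned instead of a failed call.
const { text, warnings } = await generateText({
  model: openai("o3-mini"), // example model ID
  prompt: "Summarize the changes in this release.",
  providerOptions: {
    openai: { serviceTier: "flex" },
  },
});

console.log(warnings ?? "no warnings");
console.log(text);
```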
@@ -1057,15 +1067,6 @@ var OpenAIChatLanguageModel = class {
     };
   }
 };
-function supportsFlexProcessing(modelId) {
-  return modelId.startsWith("o3") || modelId.startsWith("o4-mini") || modelId.startsWith("gpt-5") && !modelId.startsWith("gpt-5-chat");
-}
-function supportsPriorityProcessing(modelId) {
-  return modelId.startsWith("gpt-4") || modelId.startsWith("gpt-5-mini") || modelId.startsWith("gpt-5") && !modelId.startsWith("gpt-5-nano") && !modelId.startsWith("gpt-5-chat") || modelId.startsWith("o3") || modelId.startsWith("o4-mini");
-}
-function getSystemMessageMode(modelId) {
-  return isReasoningModel(modelId) ? "developer" : "system";
-}
 
 // src/completion/openai-completion-language-model.ts
 var import_provider_utils8 = require("@ai-sdk/provider-utils");
@@ -3723,7 +3724,7 @@ var OpenAIResponsesLanguageModel = class {
   }) {
     var _a, _b, _c, _d;
     const warnings = [];
-    const
+    const modelCapabilities = getOpenAILanguageModelCapabilities(this.modelId);
     if (topK != null) {
       warnings.push({ type: "unsupported", feature: "topK" });
     }
@@ -3768,7 +3769,7 @@ var OpenAIResponsesLanguageModel = class {
     const { input, warnings: inputWarnings } = await convertToOpenAIResponsesInput({
       prompt,
       toolNameMapping,
-      systemMessageMode:
+      systemMessageMode: modelCapabilities.systemMessageMode,
       fileIdPrefixes: this.config.fileIdPrefixes,
       store: (_a = openaiOptions == null ? void 0 : openaiOptions.store) != null ? _a : true,
       hasLocalShellTool: hasOpenAITool("openai.local_shell"),
@@ -3802,7 +3803,7 @@ var OpenAIResponsesLanguageModel = class {
       addInclude("code_interpreter_call.outputs");
     }
     const store = openaiOptions == null ? void 0 : openaiOptions.store;
-    if (store === false &&
+    if (store === false && modelCapabilities.isReasoningModel) {
       addInclude("reasoning.encrypted_content");
     }
     const baseArgs = {
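The store check also now consults the capabilities object: disabling storage on a reasoning model adds reasoning.encrypted_content to the include list, so reasoning state can be carried by the caller instead of relying on OpenAI's stored responses. A hedged usage sketch using the same providerOptions convention as above; the model ID is an example:

```ts
import { openai } from "@ai-sdk/openai";
import { generateText } from "ai";

// store: false on a reasoning model → the provider adds
// "reasoning.encrypted_content" to the request's include list so reasoning
// can be round-tripped by the caller without server-side storage.
const { text } = await generateText({
  model: openai("o4-mini"), // example reasoning model
  prompt: "Plan a three-step refactor.",
  providerOptions: {
    openai: { store: false },
  },
});

console.log(text);
```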
@@ -3844,7 +3845,7 @@ var OpenAIResponsesLanguageModel = class {
       top_logprobs: topLogprobs,
       truncation: openaiOptions == null ? void 0 : openaiOptions.truncation,
       // model-specific settings:
-      ...
+      ...modelCapabilities.isReasoningModel && ((openaiOptions == null ? void 0 : openaiOptions.reasoningEffort) != null || (openaiOptions == null ? void 0 : openaiOptions.reasoningSummary) != null) && {
         reasoning: {
           ...(openaiOptions == null ? void 0 : openaiOptions.reasoningEffort) != null && {
             effort: openaiOptions.reasoningEffort
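The spread above only attaches a reasoning block to the request when the model is a reasoning model and reasoningEffort or reasoningSummary is actually set. A hedged usage sketch; the option names are the ones checked in the diff, while the summary value and model ID are examples:

```ts
import { openai } from "@ai-sdk/openai";
import { generateText } from "ai";

// For reasoning models, reasoningEffort (and reasoningSummary) are forwarded
// inside the request's `reasoning` block; non-reasoning models skip it.
const { text } = await generateText({
  model: openai("o4-mini"), // example reasoning model
  prompt: "Outline an approach, then answer briefly.",
  providerOptions: {
    openai: {
      reasoningEffort: "low",
      reasoningSummary: "auto", // assumed value; mirrors the reasoningSummary check in the diff
    },
  },
});

console.log(text);
```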
@@ -3855,7 +3856,7 @@ var OpenAIResponsesLanguageModel = class {
         }
       }
     };
-    if (
+    if (modelCapabilities.isReasoningModel || (openaiOptions == null ? void 0 : openaiOptions.reasoningEffort) === "none" && modelCapabilities.supportsNonReasoningParameters) {
       if (baseArgs.temperature != null) {
         baseArgs.temperature = void 0;
         warnings.push({
@@ -3888,7 +3889,7 @@ var OpenAIResponsesLanguageModel = class {
         });
       }
     }
-    if ((openaiOptions == null ? void 0 : openaiOptions.serviceTier) === "flex" && !
+    if ((openaiOptions == null ? void 0 : openaiOptions.serviceTier) === "flex" && !modelCapabilities.supportsFlexProcessing) {
       warnings.push({
         type: "unsupported",
         feature: "serviceTier",
@@ -3896,7 +3897,7 @@ var OpenAIResponsesLanguageModel = class {
       });
       delete baseArgs.service_tier;
     }
-    if ((openaiOptions == null ? void 0 : openaiOptions.serviceTier) === "priority" && !
+    if ((openaiOptions == null ? void 0 : openaiOptions.serviceTier) === "priority" && !modelCapabilities.supportsPriorityProcessing) {
       warnings.push({
         type: "unsupported",
         feature: "serviceTier",
@@ -5029,18 +5030,6 @@ function isResponseAnnotationAddedChunk(chunk) {
 function isErrorChunk(chunk) {
   return chunk.type === "error";
 }
-function getResponsesModelConfig(modelId) {
-  const supportsFlexProcessing2 = modelId.startsWith("o3") || modelId.startsWith("o4-mini") || modelId.startsWith("gpt-5") && !modelId.startsWith("gpt-5-chat");
-  const supportsPriorityProcessing2 = modelId.startsWith("gpt-4") || modelId.startsWith("gpt-5-mini") || modelId.startsWith("gpt-5") && !modelId.startsWith("gpt-5-nano") && !modelId.startsWith("gpt-5-chat") || modelId.startsWith("o3") || modelId.startsWith("o4-mini");
-  const isReasoningModel2 = isReasoningModel(modelId);
-  const systemMessageMode = isReasoningModel2 ? "developer" : "system";
-  return {
-    systemMessageMode,
-    supportsFlexProcessing: supportsFlexProcessing2,
-    supportsPriorityProcessing: supportsPriorityProcessing2,
-    isReasoningModel: isReasoningModel2
-  };
-}
 function mapWebSearchOutput(action) {
   var _a;
   switch (action.type) {
@@ -5412,7 +5401,7 @@ var OpenAITranscriptionModel = class {
 };
 
 // src/version.ts
-var VERSION = true ? "3.0.0-beta.
+var VERSION = true ? "3.0.0-beta.85" : "0.0.0-test";
 
 // src/openai-provider.ts
 function createOpenAI(options = {}) {