@ai-sdk/openai 2.0.77 → 2.0.79
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +12 -0
- package/dist/index.js +70 -111
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +70 -111
- package/dist/index.mjs.map +1 -1
- package/dist/internal/index.js +69 -110
- package/dist/internal/index.js.map +1 -1
- package/dist/internal/index.mjs +69 -110
- package/dist/internal/index.mjs.map +1 -1
- package/package.json +3 -3
package/CHANGELOG.md
CHANGED
@@ -1,5 +1,17 @@
 # @ai-sdk/openai

+## 2.0.79
+
+### Patch Changes
+
+- 5b6aa88: fix(openai): fix parameter exclusion logic
+
+## 2.0.78
+
+### Patch Changes
+
+- e245b61: fix(openai): allow temperature etc setting when reasoning effort is none for gpt-5.1
+
 ## 2.0.77

 ### Patch Changes
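
The two patch entries are related: 2.0.78 allows temperature and similar settings when reasoning effort is "none" on gpt-5.1, and 2.0.79 fixes the parameter exclusion logic shown in the dist diff below. A minimal usage sketch of the combination this enables, assuming the standard AI SDK generateText call and the openai providerOptions key; the prompt and option values are illustrative only:

import { generateText } from "ai";
import { openai } from "@ai-sdk/openai";

// On gpt-5.1 with reasoningEffort "none", temperature and topP are forwarded
// to the API instead of being stripped as "not supported for reasoning models".
const { text } = await generateText({
  model: openai("gpt-5.1"),
  prompt: "Summarize the release notes in one sentence.",
  temperature: 0.3,
  topP: 0.9,
  providerOptions: {
    openai: { reasoningEffort: "none" },
  },
});

For any other reasoning model, or for gpt-5.1 with an actual reasoning effort, the provider still strips these parameters and emits unsupported-setting warnings, as the dist diff below shows.
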
package/dist/index.js
CHANGED
@@ -52,6 +52,22 @@ var openaiFailedResponseHandler = (0, import_provider_utils.createJsonErrorRespo
   errorToMessage: (data) => data.error.message
 });

+// src/openai-language-model-capabilities.ts
+function getOpenAILanguageModelCapabilities(modelId) {
+  const supportsFlexProcessing = modelId.startsWith("o3") || modelId.startsWith("o4-mini") || modelId.startsWith("gpt-5") && !modelId.startsWith("gpt-5-chat");
+  const supportsPriorityProcessing = modelId.startsWith("gpt-4") || modelId.startsWith("gpt-5-mini") || modelId.startsWith("gpt-5") && !modelId.startsWith("gpt-5-nano") && !modelId.startsWith("gpt-5-chat") || modelId.startsWith("o3") || modelId.startsWith("o4-mini");
+  const isReasoningModel = !(modelId.startsWith("gpt-3") || modelId.startsWith("gpt-4") || modelId.startsWith("chatgpt-4o") || modelId.startsWith("gpt-5-chat"));
+  const supportsNonReasoningParameters = modelId.startsWith("gpt-5.1");
+  const systemMessageMode = isReasoningModel ? "developer" : "system";
+  return {
+    supportsFlexProcessing,
+    supportsPriorityProcessing,
+    isReasoningModel,
+    systemMessageMode,
+    supportsNonReasoningParameters
+  };
+}
+
 // src/chat/convert-to-openai-chat-messages.ts
 var import_provider = require("@ai-sdk/provider");
 var import_provider_utils2 = require("@ai-sdk/provider-utils");
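
The hunk above replaces several per-file helpers (removed later in this diff) with one shared getOpenAILanguageModelCapabilities function. A standalone sketch restating the three flags that drive the parameter-exclusion change, evaluated for a few illustrative model IDs; checkCapabilities is a hypothetical name and the flex/priority flags are omitted for brevity:

// Restates the predicates from the hunk above, outside the bundle, for illustration.
function checkCapabilities(modelId: string) {
  const isReasoningModel = !(
    modelId.startsWith("gpt-3") ||
    modelId.startsWith("gpt-4") ||
    modelId.startsWith("chatgpt-4o") ||
    modelId.startsWith("gpt-5-chat")
  );
  return {
    isReasoningModel,
    supportsNonReasoningParameters: modelId.startsWith("gpt-5.1"),
    systemMessageMode: isReasoningModel ? "developer" : "system",
  };
}

checkCapabilities("gpt-5.1"); // { isReasoningModel: true,  supportsNonReasoningParameters: true,  systemMessageMode: "developer" }
checkCapabilities("o3-mini"); // { isReasoningModel: true,  supportsNonReasoningParameters: false, systemMessageMode: "developer" }
checkCapabilities("gpt-4o");  // { isReasoningModel: false, supportsNonReasoningParameters: false, systemMessageMode: "system" }
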
@@ -601,6 +617,7 @@ var OpenAIChatLanguageModel = class {
       schema: openaiChatLanguageModelOptions
     })) != null ? _a : {};
     const structuredOutputs = (_b = openaiOptions.structuredOutputs) != null ? _b : true;
+    const modelCapabilities = getOpenAILanguageModelCapabilities(this.modelId);
     if (topK != null) {
       warnings.push({
         type: "unsupported-setting",
@@ -617,7 +634,7 @@ var OpenAIChatLanguageModel = class {
     const { messages, warnings: messageWarnings } = convertToOpenAIChatMessages(
       {
         prompt,
-        systemMessageMode:
+        systemMessageMode: modelCapabilities.systemMessageMode
       }
     );
     warnings.push(...messageWarnings);
@@ -663,22 +680,31 @@ var OpenAIChatLanguageModel = class {
       // messages:
       messages
     };
-    if (isReasoningModel
-      if (
-        baseArgs.temperature
-
-
-
-
-
-
-
-        baseArgs.top_p
-
-
-
-
-
+    if (modelCapabilities.isReasoningModel) {
+      if (openaiOptions.reasoningEffort !== "none" || !modelCapabilities.supportsNonReasoningParameters) {
+        if (baseArgs.temperature != null) {
+          baseArgs.temperature = void 0;
+          warnings.push({
+            type: "unsupported-setting",
+            setting: "temperature",
+            details: "temperature is not supported for reasoning models"
+          });
+        }
+        if (baseArgs.top_p != null) {
+          baseArgs.top_p = void 0;
+          warnings.push({
+            type: "unsupported-setting",
+            setting: "topP",
+            details: "topP is not supported for reasoning models"
+          });
+        }
+        if (baseArgs.logprobs != null) {
+          baseArgs.logprobs = void 0;
+          warnings.push({
+            type: "other",
+            message: "logprobs is not supported for reasoning models"
+          });
+        }
       }
       if (baseArgs.frequency_penalty != null) {
         baseArgs.frequency_penalty = void 0;
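
In the rewritten block, temperature, topP, and logprobs are only stripped when the surrounding guard holds; the guard is skipped exactly when reasoningEffort is "none" and the model supports non-reasoning parameters (gpt-5.1, per the helper above). A small sketch of that guard in isolation; shouldStripSamplingParams is a hypothetical name used only for illustration:

// Hypothetical restatement of the guard added in the hunk above (chat path).
function shouldStripSamplingParams(
  reasoningEffort: string | undefined,
  supportsNonReasoningParameters: boolean,
): boolean {
  return reasoningEffort !== "none" || !supportsNonReasoningParameters;
}

shouldStripSamplingParams("none", true);    // false: gpt-5.1 at effort "none" keeps temperature/topP/logprobs
shouldStripSamplingParams("none", false);   // true:  other reasoning models strip them even at effort "none"
shouldStripSamplingParams("medium", true);  // true:  gpt-5.1 with an actual reasoning effort strips them
shouldStripSamplingParams(undefined, true); // true:  unspecified effort behaves as before

The responses-model path later in this diff applies the logically equivalent negated form !(reasoningEffort === "none" && supportsNonReasoningParameters).
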
@@ -703,13 +729,6 @@ var OpenAIChatLanguageModel = class {
           message: "logitBias is not supported for reasoning models"
         });
       }
-      if (baseArgs.logprobs != null) {
-        baseArgs.logprobs = void 0;
-        warnings.push({
-          type: "other",
-          message: "logprobs is not supported for reasoning models"
-        });
-      }
       if (baseArgs.top_logprobs != null) {
         baseArgs.top_logprobs = void 0;
         warnings.push({
@@ -733,7 +752,7 @@ var OpenAIChatLanguageModel = class {
         });
       }
     }
-    if (openaiOptions.serviceTier === "flex" && !supportsFlexProcessing
+    if (openaiOptions.serviceTier === "flex" && !modelCapabilities.supportsFlexProcessing) {
      warnings.push({
        type: "unsupported-setting",
        setting: "serviceTier",
@@ -741,7 +760,7 @@ var OpenAIChatLanguageModel = class {
      });
      baseArgs.service_tier = void 0;
    }
-    if (openaiOptions.serviceTier === "priority" && !supportsPriorityProcessing
+    if (openaiOptions.serviceTier === "priority" && !modelCapabilities.supportsPriorityProcessing) {
      warnings.push({
        type: "unsupported-setting",
        setting: "serviceTier",
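
The serviceTier checks above now read the flex/priority flags from the shared capability object instead of the removed standalone helpers. Restating those two predicates for a few illustrative model IDs shows which combinations pass without a warning; the arrow-function names are only for this sketch:

// Flex/priority predicates restated from getOpenAILanguageModelCapabilities, for illustration.
const supportsFlex = (id: string) =>
  id.startsWith("o3") || id.startsWith("o4-mini") ||
  (id.startsWith("gpt-5") && !id.startsWith("gpt-5-chat"));
const supportsPriority = (id: string) =>
  id.startsWith("gpt-4") || id.startsWith("gpt-5-mini") ||
  (id.startsWith("gpt-5") && !id.startsWith("gpt-5-nano") && !id.startsWith("gpt-5-chat")) ||
  id.startsWith("o3") || id.startsWith("o4-mini");

supportsFlex("gpt-5-nano");     // true  -> serviceTier: "flex" is forwarded
supportsPriority("gpt-5-nano"); // false -> serviceTier: "priority" triggers an unsupported-setting warning
supportsFlex("gpt-4o");         // false
supportsPriority("gpt-4o");     // true
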
@@ -1062,42 +1081,6 @@ var OpenAIChatLanguageModel = class {
     };
   }
 };
-function isReasoningModel(modelId) {
-  return (modelId.startsWith("o") || modelId.startsWith("gpt-5")) && !modelId.startsWith("gpt-5-chat");
-}
-function supportsFlexProcessing(modelId) {
-  return modelId.startsWith("o3") || modelId.startsWith("o4-mini") || modelId.startsWith("gpt-5") && !modelId.startsWith("gpt-5-chat");
-}
-function supportsPriorityProcessing(modelId) {
-  return modelId.startsWith("gpt-4") || modelId.startsWith("gpt-5-mini") || modelId.startsWith("gpt-5") && !modelId.startsWith("gpt-5-nano") && !modelId.startsWith("gpt-5-chat") || modelId.startsWith("o3") || modelId.startsWith("o4-mini");
-}
-function getSystemMessageMode(modelId) {
-  var _a, _b;
-  if (!isReasoningModel(modelId)) {
-    return "system";
-  }
-  return (_b = (_a = reasoningModels[modelId]) == null ? void 0 : _a.systemMessageMode) != null ? _b : "developer";
-}
-var reasoningModels = {
-  o3: {
-    systemMessageMode: "developer"
-  },
-  "o3-2025-04-16": {
-    systemMessageMode: "developer"
-  },
-  "o3-mini": {
-    systemMessageMode: "developer"
-  },
-  "o3-mini-2025-01-31": {
-    systemMessageMode: "developer"
-  },
-  "o4-mini": {
-    systemMessageMode: "developer"
-  },
-  "o4-mini-2025-04-16": {
-    systemMessageMode: "developer"
-  }
-};

 // src/completion/openai-completion-language-model.ts
 var import_provider_utils8 = require("@ai-sdk/provider-utils");
@@ -3164,7 +3147,7 @@ var OpenAIResponsesLanguageModel = class {
   }) {
     var _a, _b, _c, _d;
     const warnings = [];
-    const
+    const modelCapabilities = getOpenAILanguageModelCapabilities(this.modelId);
     if (topK != null) {
       warnings.push({ type: "unsupported-setting", setting: "topK" });
     }
@@ -3200,7 +3183,7 @@ var OpenAIResponsesLanguageModel = class {
     }
     const { input, warnings: inputWarnings } = await convertToOpenAIResponsesInput({
       prompt,
-      systemMessageMode:
+      systemMessageMode: modelCapabilities.systemMessageMode,
       fileIdPrefixes: this.config.fileIdPrefixes,
       store: (_a = openaiOptions == null ? void 0 : openaiOptions.store) != null ? _a : true,
       hasLocalShellTool: hasOpenAITool("openai.local_shell")
@@ -3234,7 +3217,7 @@ var OpenAIResponsesLanguageModel = class {
       addInclude("code_interpreter_call.outputs");
     }
     const store = openaiOptions == null ? void 0 : openaiOptions.store;
-    if (store === false &&
+    if (store === false && modelCapabilities.isReasoningModel) {
       addInclude("reasoning.encrypted_content");
     }
     const baseArgs = {
@@ -3276,7 +3259,7 @@ var OpenAIResponsesLanguageModel = class {
       top_logprobs: topLogprobs,
       truncation: openaiOptions == null ? void 0 : openaiOptions.truncation,
       // model-specific settings:
-      ...
+      ...modelCapabilities.isReasoningModel && ((openaiOptions == null ? void 0 : openaiOptions.reasoningEffort) != null || (openaiOptions == null ? void 0 : openaiOptions.reasoningSummary) != null) && {
        reasoning: {
          ...(openaiOptions == null ? void 0 : openaiOptions.reasoningEffort) != null && {
            effort: openaiOptions.reasoningEffort
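
In the responses path above, the reasoning field of the request body is attached only when the model is a reasoning model (read from the shared capability object) and an effort or summary was actually provided, using conditional object spreads. A self-contained sketch of that construction pattern; buildReasoning and the option shape are illustrative only, not part of the package:

type ResponsesReasoningOptions = {
  reasoningEffort?: string;
  reasoningSummary?: string;
};

// Hypothetical illustration of the conditional-spread pattern used above:
// the `reasoning` key is omitted entirely unless the model is a reasoning
// model and at least one of the options is set.
function buildReasoning(isReasoningModel: boolean, opts: ResponsesReasoningOptions) {
  return {
    ...(isReasoningModel &&
      (opts.reasoningEffort != null || opts.reasoningSummary != null) && {
        reasoning: {
          ...(opts.reasoningEffort != null && { effort: opts.reasoningEffort }),
          ...(opts.reasoningSummary != null && { summary: opts.reasoningSummary }),
        },
      }),
  };
}

buildReasoning(true, { reasoningEffort: "low" });  // { reasoning: { effort: "low" } }
buildReasoning(true, {});                          // {}  (no reasoning key at all)
buildReasoning(false, { reasoningEffort: "low" }); // {}  (non-reasoning models never send it)
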
@@ -3287,22 +3270,24 @@ var OpenAIResponsesLanguageModel = class {
         }
       }
     };
-    if (
-      if (
-        baseArgs.temperature
-
-
-
-
-
-
-
-        baseArgs.top_p
-
-
-
-
-
+    if (modelCapabilities.isReasoningModel) {
+      if (!((openaiOptions == null ? void 0 : openaiOptions.reasoningEffort) === "none" && modelCapabilities.supportsNonReasoningParameters)) {
+        if (baseArgs.temperature != null) {
+          baseArgs.temperature = void 0;
+          warnings.push({
+            type: "unsupported-setting",
+            setting: "temperature",
+            details: "temperature is not supported for reasoning models"
+          });
+        }
+        if (baseArgs.top_p != null) {
+          baseArgs.top_p = void 0;
+          warnings.push({
+            type: "unsupported-setting",
+            setting: "topP",
+            details: "topP is not supported for reasoning models"
+          });
+        }
       }
     } else {
       if ((openaiOptions == null ? void 0 : openaiOptions.reasoningEffort) != null) {
@@ -3320,7 +3305,7 @@ var OpenAIResponsesLanguageModel = class {
        });
      }
    }
-    if ((openaiOptions == null ? void 0 : openaiOptions.serviceTier) === "flex" && !
+    if ((openaiOptions == null ? void 0 : openaiOptions.serviceTier) === "flex" && !modelCapabilities.supportsFlexProcessing) {
      warnings.push({
        type: "unsupported-setting",
        setting: "serviceTier",
@@ -3328,7 +3313,7 @@ var OpenAIResponsesLanguageModel = class {
      });
      delete baseArgs.service_tier;
    }
-    if ((openaiOptions == null ? void 0 : openaiOptions.serviceTier) === "priority" && !
+    if ((openaiOptions == null ? void 0 : openaiOptions.serviceTier) === "priority" && !modelCapabilities.supportsPriorityProcessing) {
      warnings.push({
        type: "unsupported-setting",
        setting: "serviceTier",
@@ -4185,32 +4170,6 @@ function isResponseAnnotationAddedChunk(chunk) {
 function isErrorChunk(chunk) {
   return chunk.type === "error";
 }
-function getResponsesModelConfig(modelId) {
-  const supportsFlexProcessing2 = modelId.startsWith("o3") || modelId.startsWith("o4-mini") || modelId.startsWith("gpt-5") && !modelId.startsWith("gpt-5-chat");
-  const supportsPriorityProcessing2 = modelId.startsWith("gpt-4") || modelId.startsWith("gpt-5-mini") || modelId.startsWith("gpt-5") && !modelId.startsWith("gpt-5-nano") && !modelId.startsWith("gpt-5-chat") || modelId.startsWith("o3") || modelId.startsWith("o4-mini");
-  const defaults = {
-    systemMessageMode: "system",
-    supportsFlexProcessing: supportsFlexProcessing2,
-    supportsPriorityProcessing: supportsPriorityProcessing2
-  };
-  if (modelId.startsWith("gpt-5-chat")) {
-    return {
-      ...defaults,
-      isReasoningModel: false
-    };
-  }
-  if (modelId.startsWith("o") || modelId.startsWith("gpt-5") || modelId.startsWith("codex-") || modelId.startsWith("computer-use")) {
-    return {
-      ...defaults,
-      isReasoningModel: true,
-      systemMessageMode: "developer"
-    };
-  }
-  return {
-    ...defaults,
-    isReasoningModel: false
-  };
-}
 function mapWebSearchOutput(action) {
   var _a;
   switch (action.type) {
@@ -4582,7 +4541,7 @@ var OpenAITranscriptionModel = class {
 };

 // src/version.ts
-var VERSION = true ? "2.0.
+var VERSION = true ? "2.0.79" : "0.0.0-test";

 // src/openai-provider.ts
 function createOpenAI(options = {}) {