@ai-sdk/openai 3.0.0-beta.51 → 3.0.0-beta.53
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +12 -0
- package/dist/index.js +28 -57
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +28 -57
- package/dist/index.mjs.map +1 -1
- package/dist/internal/index.js +27 -56
- package/dist/internal/index.js.map +1 -1
- package/dist/internal/index.mjs +27 -56
- package/dist/internal/index.mjs.map +1 -1
- package/package.json +3 -3
package/dist/internal/index.mjs
CHANGED
@@ -31,6 +31,15 @@ var openaiFailedResponseHandler = createJsonErrorResponseHandler({
   errorToMessage: (data) => data.error.message
 });
 
+// src/openai-is-reasoning-model.ts
+function isReasoningModel(modelId) {
+  if (modelId.startsWith("gpt-3")) return false;
+  if (modelId.startsWith("gpt-4")) return false;
+  if (modelId.startsWith("chatgpt-4o")) return false;
+  if (modelId.startsWith("gpt-5-chat")) return false;
+  return true;
+}
+
 // src/chat/convert-to-openai-chat-messages.ts
 import {
   UnsupportedFunctionalityError
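The new isReasoningModel helper inverts the detection logic: instead of matching known reasoning-model prefixes, every model id is treated as a reasoning model unless it matches one of the listed non-reasoning prefixes. A minimal sketch of how the predicate classifies a few ids (the ids below are illustrative, not an exhaustive list):

    // Sketch: exercising the denylist-style predicate added above.
    function isReasoningModel(modelId) {
      if (modelId.startsWith("gpt-3")) return false;
      if (modelId.startsWith("gpt-4")) return false;
      if (modelId.startsWith("chatgpt-4o")) return false;
      if (modelId.startsWith("gpt-5-chat")) return false;
      return true;
    }

    console.log(isReasoningModel("gpt-4o-mini"));       // false ("gpt-4" prefix)
    console.log(isReasoningModel("gpt-5-chat-latest")); // false ("gpt-5-chat" prefix)
    console.log(isReasoningModel("o3-mini"));           // true
    console.log(isReasoningModel("gpt-5"));             // true
    console.log(isReasoningModel("some-future-model")); // true -- unknown ids default to reasoning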
@@ -224,7 +233,7 @@ function getResponseMetadata({
   return {
     id: id != null ? id : void 0,
     modelId: model != null ? model : void 0,
-    timestamp: created
+    timestamp: created ? new Date(created * 1e3) : void 0
   };
 }
 
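getResponseMetadata now converts the created field (Unix seconds in the OpenAI chat completion payload) into a Date and leaves the timestamp undefined when created is missing. A small sketch of the conversion with an illustrative value:

    // Sketch: Unix seconds -> Date, matching the updated timestamp handling.
    const created = 1704067200; // illustrative value; the API reports seconds, not milliseconds
    const timestamp = created ? new Date(created * 1e3) : void 0;
    console.log(timestamp?.toISOString()); // 2024-01-01T00:00:00.000Z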
@@ -851,7 +860,7 @@ var OpenAIChatLanguageModel = class {
       outputTokens: void 0,
       totalTokens: void 0
     };
-    let
+    let metadataExtracted = false;
     let isActiveText = false;
     const providerMetadata = { openai: {} };
     return {
@@ -876,12 +885,15 @@ var OpenAIChatLanguageModel = class {
             controller.enqueue({ type: "error", error: value.error });
             return;
           }
-          if (
-
-
-
-
-
+          if (!metadataExtracted) {
+            const metadata = getResponseMetadata(value);
+            if (Object.values(metadata).some(Boolean)) {
+              metadataExtracted = true;
+              controller.enqueue({
+                type: "response-metadata",
+                ...getResponseMetadata(value)
+              });
+            }
           }
           if (value.usage != null) {
             usage.inputTokens = (_a = value.usage.prompt_tokens) != null ? _a : void 0;
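The streaming handler now gates the response-metadata event on a metadataExtracted flag: the event is emitted once, the first time a chunk carries at least one truthy metadata field. A standalone sketch of that emit-once pattern, using made-up chunks in place of real API output:

    // Sketch of the emit-once gating above (illustrative chunks, not real API output).
    let metadataExtracted = false;
    const events = [];
    const chunks = [{}, { id: "chatcmpl-123", model: "gpt-5" }, { id: "chatcmpl-123" }];
    for (const value of chunks) {
      if (!metadataExtracted) {
        const metadata = { id: value.id, modelId: value.model };
        if (Object.values(metadata).some(Boolean)) {
          metadataExtracted = true;
          events.push({ type: "response-metadata", ...metadata });
        }
      }
    }
    console.log(events.length); // 1 -- the empty first chunk is skipped, later chunks do not re-emit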
@@ -1036,9 +1048,6 @@ var OpenAIChatLanguageModel = class {
     };
   }
 };
-function isReasoningModel(modelId) {
-  return (modelId.startsWith("o") || modelId.startsWith("gpt-5")) && !modelId.startsWith("gpt-5-chat");
-}
 function supportsFlexProcessing(modelId) {
   return modelId.startsWith("o3") || modelId.startsWith("o4-mini") || modelId.startsWith("gpt-5") && !modelId.startsWith("gpt-5-chat");
 }
@@ -1046,32 +1055,8 @@ function supportsPriorityProcessing(modelId) {
   return modelId.startsWith("gpt-4") || modelId.startsWith("gpt-5-mini") || modelId.startsWith("gpt-5") && !modelId.startsWith("gpt-5-nano") && !modelId.startsWith("gpt-5-chat") || modelId.startsWith("o3") || modelId.startsWith("o4-mini");
 }
 function getSystemMessageMode(modelId) {
-
-  if (!isReasoningModel(modelId)) {
-    return "system";
-  }
-  return (_b = (_a = reasoningModels[modelId]) == null ? void 0 : _a.systemMessageMode) != null ? _b : "developer";
+  return isReasoningModel(modelId) ? "developer" : "system";
 }
-var reasoningModels = {
-  o3: {
-    systemMessageMode: "developer"
-  },
-  "o3-2025-04-16": {
-    systemMessageMode: "developer"
-  },
-  "o3-mini": {
-    systemMessageMode: "developer"
-  },
-  "o3-mini-2025-01-31": {
-    systemMessageMode: "developer"
-  },
-  "o4-mini": {
-    systemMessageMode: "developer"
-  },
-  "o4-mini-2025-04-16": {
-    systemMessageMode: "developer"
-  }
-};
 
 // src/completion/openai-completion-language-model.ts
 import {
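With the per-model reasoningModels table removed, the system-message mode is derived entirely from the shared predicate: reasoning models use "developer", everything else keeps "system". A short sketch (model ids are illustrative; it assumes the isReasoningModel function from the top of this file is in scope):

    // Sketch: the simplified mode selection.
    function getSystemMessageMode(modelId) {
      return isReasoningModel(modelId) ? "developer" : "system";
    }

    console.log(getSystemMessageMode("gpt-4.1")); // "system"
    console.log(getSystemMessageMode("o4-mini")); // "developer"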
@@ -4517,27 +4502,13 @@ function isErrorChunk(chunk) {
 function getResponsesModelConfig(modelId) {
   const supportsFlexProcessing2 = modelId.startsWith("o3") || modelId.startsWith("o4-mini") || modelId.startsWith("gpt-5") && !modelId.startsWith("gpt-5-chat");
   const supportsPriorityProcessing2 = modelId.startsWith("gpt-4") || modelId.startsWith("gpt-5-mini") || modelId.startsWith("gpt-5") && !modelId.startsWith("gpt-5-nano") && !modelId.startsWith("gpt-5-chat") || modelId.startsWith("o3") || modelId.startsWith("o4-mini");
-  const
-
-    supportsFlexProcessing: supportsFlexProcessing2,
-    supportsPriorityProcessing: supportsPriorityProcessing2
-  };
-  if (modelId.startsWith("gpt-5-chat")) {
-    return {
-      ...defaults,
-      isReasoningModel: false
-    };
-  }
-  if (modelId.startsWith("o") || modelId.startsWith("gpt-5") || modelId.startsWith("codex-") || modelId.startsWith("computer-use")) {
-    return {
-      ...defaults,
-      isReasoningModel: true,
-      systemMessageMode: "developer"
-    };
-  }
+  const isReasoningModel2 = isReasoningModel(modelId);
+  const systemMessageMode = isReasoningModel2 ? "developer" : "system";
   return {
-
-
+    systemMessageMode,
+    supportsFlexProcessing: supportsFlexProcessing2,
+    supportsPriorityProcessing: supportsPriorityProcessing2,
+    isReasoningModel: isReasoningModel2
   };
 }
 function mapWebSearchOutput(action) {
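getResponsesModelConfig now builds its result from the same shared predicate instead of separate per-prefix branches. A hedged sketch of the shapes it would return, assuming the function above is in scope and using illustrative model ids:

    // Sketch: approximate results of the rewritten getResponsesModelConfig.
    console.log(getResponsesModelConfig("gpt-5-chat-latest"));
    // { systemMessageMode: "system", supportsFlexProcessing: false,
    //   supportsPriorityProcessing: false, isReasoningModel: false }
    console.log(getResponsesModelConfig("o4-mini"));
    // { systemMessageMode: "developer", supportsFlexProcessing: true,
    //   supportsPriorityProcessing: true, isReasoningModel: true }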