@ai-sdk/openai 3.0.0-beta.51 → 3.0.0-beta.53

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/CHANGELOG.md CHANGED
@@ -1,5 +1,17 @@
  # @ai-sdk/openai

+ ## 3.0.0-beta.53
+
+ ### Patch Changes
+
+ - dae2185: fix(openai): extract meta data from first chunk that contains any
+
+ ## 3.0.0-beta.52
+
+ ### Patch Changes
+
+ - 348fd10: fix(openai): treat unknown models as reasoning
+
  ## 3.0.0-beta.51

  ### Patch Changes
package/dist/index.js CHANGED
@@ -52,6 +52,15 @@ var openaiFailedResponseHandler = (0, import_provider_utils.createJsonErrorRespo
  errorToMessage: (data) => data.error.message
  });

+ // src/openai-is-reasoning-model.ts
+ function isReasoningModel(modelId) {
+ if (modelId.startsWith("gpt-3")) return false;
+ if (modelId.startsWith("gpt-4")) return false;
+ if (modelId.startsWith("chatgpt-4o")) return false;
+ if (modelId.startsWith("gpt-5-chat")) return false;
+ return true;
+ }
+
  // src/chat/convert-to-openai-chat-messages.ts
  var import_provider = require("@ai-sdk/provider");
  var import_provider_utils2 = require("@ai-sdk/provider-utils");
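The new helper inverts the old check: rather than matching known reasoning prefixes, it only excludes the known non-reasoning families (`gpt-3*`, `gpt-4*`, `chatgpt-4o*`, `gpt-5-chat*`), so an unrecognized model ID now defaults to being treated as a reasoning model (the beta.52 changelog entry). A rough illustration, with model IDs chosen purely as examples:

```ts
// Illustrative only: how the extracted isReasoningModel helper classifies a few IDs.
isReasoningModel("o4-mini");            // true  – no excluded prefix matches
isReasoningModel("gpt-5");              // true
isReasoningModel("some-future-model");  // true  – unknown models now default to reasoning
isReasoningModel("gpt-4o");             // false – excluded via the "gpt-4" prefix
isReasoningModel("gpt-5-chat-latest");  // false – excluded via the "gpt-5-chat" prefix
```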
@@ -243,7 +252,7 @@ function getResponseMetadata({
  return {
  id: id != null ? id : void 0,
  modelId: model != null ? model : void 0,
- timestamp: created != null ? new Date(created * 1e3) : void 0
+ timestamp: created ? new Date(created * 1e3) : void 0
  };
  }

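The switch to a truthiness check also treats a `created` value of `0` as absent; previously `created: 0` produced `new Date(0)` (the Unix epoch) rather than an undefined timestamp. A quick sketch of the difference (the helper names here are hypothetical, used only for comparison):

```ts
// Illustrative comparison of the old and new checks for the timestamp field.
const toTimestampOld = (created?: number | null) =>
  created != null ? new Date(created * 1e3) : undefined;
const toTimestampNew = (created?: number | null) =>
  created ? new Date(created * 1e3) : undefined;

toTimestampOld(0); // Date at the Unix epoch
toTimestampNew(0); // undefined – 0 is now treated the same as a missing value
```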
@@ -868,7 +877,7 @@ var OpenAIChatLanguageModel = class {
  outputTokens: void 0,
  totalTokens: void 0
  };
- let isFirstChunk = true;
+ let metadataExtracted = false;
  let isActiveText = false;
  const providerMetadata = { openai: {} };
  return {
@@ -893,12 +902,15 @@ var OpenAIChatLanguageModel = class {
  controller.enqueue({ type: "error", error: value.error });
  return;
  }
- if (isFirstChunk) {
- isFirstChunk = false;
- controller.enqueue({
- type: "response-metadata",
- ...getResponseMetadata(value)
- });
+ if (!metadataExtracted) {
+ const metadata = getResponseMetadata(value);
+ if (Object.values(metadata).some(Boolean)) {
+ metadataExtracted = true;
+ controller.enqueue({
+ type: "response-metadata",
+ ...getResponseMetadata(value)
+ });
+ }
  }
  if (value.usage != null) {
  usage.inputTokens = (_a = value.usage.prompt_tokens) != null ? _a : void 0;
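This is the beta.53 fix: instead of always emitting `response-metadata` from the very first stream chunk (which may carry no id, model, or created fields), the stream now waits for the first chunk whose extracted metadata contains at least one truthy value. A minimal sketch of that selection logic, assuming a simplified chunk shape and a hypothetical helper name:

```ts
// Simplified chunk shape; field names mirror the chat-completions chunk fields used above.
type Chunk = { id?: string | null; model?: string | null; created?: number | null };

function firstUsableMetadata(chunks: Chunk[]) {
  for (const { id, model, created } of chunks) {
    const metadata = {
      id: id ?? undefined,
      modelId: model ?? undefined,
      timestamp: created ? new Date(created * 1e3) : undefined,
    };
    // Same guard as the hunk above: emit only once any metadata value is present.
    if (Object.values(metadata).some(Boolean)) return metadata;
  }
  return undefined;
}

// An empty leading chunk no longer suppresses the real metadata:
firstUsableMetadata([{}, { id: "chatcmpl-123", model: "gpt-5", created: 1700000000 }]);
// -> metadata taken from the second chunk
```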
@@ -1053,9 +1065,6 @@ var OpenAIChatLanguageModel = class {
  };
  }
  };
- function isReasoningModel(modelId) {
- return (modelId.startsWith("o") || modelId.startsWith("gpt-5")) && !modelId.startsWith("gpt-5-chat");
- }
  function supportsFlexProcessing(modelId) {
  return modelId.startsWith("o3") || modelId.startsWith("o4-mini") || modelId.startsWith("gpt-5") && !modelId.startsWith("gpt-5-chat");
  }
@@ -1063,32 +1072,8 @@ function supportsPriorityProcessing(modelId) {
  return modelId.startsWith("gpt-4") || modelId.startsWith("gpt-5-mini") || modelId.startsWith("gpt-5") && !modelId.startsWith("gpt-5-nano") && !modelId.startsWith("gpt-5-chat") || modelId.startsWith("o3") || modelId.startsWith("o4-mini");
  }
  function getSystemMessageMode(modelId) {
- var _a, _b;
- if (!isReasoningModel(modelId)) {
- return "system";
- }
- return (_b = (_a = reasoningModels[modelId]) == null ? void 0 : _a.systemMessageMode) != null ? _b : "developer";
+ return isReasoningModel(modelId) ? "developer" : "system";
  }
- var reasoningModels = {
- o3: {
- systemMessageMode: "developer"
- },
- "o3-2025-04-16": {
- systemMessageMode: "developer"
- },
- "o3-mini": {
- systemMessageMode: "developer"
- },
- "o3-mini-2025-01-31": {
- systemMessageMode: "developer"
- },
- "o4-mini": {
- systemMessageMode: "developer"
- },
- "o4-mini-2025-04-16": {
- systemMessageMode: "developer"
- }
- };

  // src/completion/openai-completion-language-model.ts
  var import_provider_utils8 = require("@ai-sdk/provider-utils");
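With the shared helper in place, the per-model `reasoningModels` lookup table is no longer needed: every entry in it mapped to `"developer"`, and those IDs are still classified as reasoning models, so the listed models keep their mode. The visible change is that unknown model IDs now also get `"developer"` instead of `"system"`. Illustrative results under the simplified function:

```ts
// Illustrative outcomes of the simplified getSystemMessageMode (not exhaustive).
getSystemMessageMode("o3-mini");         // "developer" – reasoning model, as before
getSystemMessageMode("gpt-4.1");         // "system"    – excluded via the "gpt-4" prefix
getSystemMessageMode("brand-new-model"); // "developer" – unknown IDs now treated as reasoning
```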
@@ -4183,27 +4168,13 @@ function isErrorChunk(chunk) {
  function getResponsesModelConfig(modelId) {
  const supportsFlexProcessing2 = modelId.startsWith("o3") || modelId.startsWith("o4-mini") || modelId.startsWith("gpt-5") && !modelId.startsWith("gpt-5-chat");
  const supportsPriorityProcessing2 = modelId.startsWith("gpt-4") || modelId.startsWith("gpt-5-mini") || modelId.startsWith("gpt-5") && !modelId.startsWith("gpt-5-nano") && !modelId.startsWith("gpt-5-chat") || modelId.startsWith("o3") || modelId.startsWith("o4-mini");
- const defaults = {
- systemMessageMode: "system",
- supportsFlexProcessing: supportsFlexProcessing2,
- supportsPriorityProcessing: supportsPriorityProcessing2
- };
- if (modelId.startsWith("gpt-5-chat")) {
- return {
- ...defaults,
- isReasoningModel: false
- };
- }
- if (modelId.startsWith("o") || modelId.startsWith("gpt-5") || modelId.startsWith("codex-") || modelId.startsWith("computer-use")) {
- return {
- ...defaults,
- isReasoningModel: true,
- systemMessageMode: "developer"
- };
- }
+ const isReasoningModel2 = isReasoningModel(modelId);
+ const systemMessageMode = isReasoningModel2 ? "developer" : "system";
  return {
- ...defaults,
- isReasoningModel: false
+ systemMessageMode,
+ supportsFlexProcessing: supportsFlexProcessing2,
+ supportsPriorityProcessing: supportsPriorityProcessing2,
+ isReasoningModel: isReasoningModel2
  };
  }
  function mapWebSearchOutput(action) {
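The Responses-API config now reuses the same `isReasoningModel` helper instead of its own prefix checks, so the explicit `codex-` and `computer-use` branches disappear (those prefixes are not excluded, so they are still classified as reasoning), and unknown models likewise default to reasoning. Roughly what the updated function returns for two example IDs (illustrative, not from the package docs):

```ts
getResponsesModelConfig("o3");
// -> { systemMessageMode: "developer", supportsFlexProcessing: true,
//      supportsPriorityProcessing: true, isReasoningModel: true }

getResponsesModelConfig("gpt-5-chat-latest");
// -> { systemMessageMode: "system", supportsFlexProcessing: false,
//      supportsPriorityProcessing: false, isReasoningModel: false }
```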
@@ -4577,7 +4548,7 @@ var OpenAITranscriptionModel = class {
  };

  // src/version.ts
- var VERSION = true ? "3.0.0-beta.51" : "0.0.0-test";
+ var VERSION = true ? "3.0.0-beta.53" : "0.0.0-test";

  // src/openai-provider.ts
  function createOpenAI(options = {}) {