@ai-sdk/openai 3.0.0-beta.52 → 3.0.0-beta.54
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +12 -0
- package/dist/index.js +18 -11
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +18 -11
- package/dist/index.mjs.map +1 -1
- package/dist/internal/index.js +17 -10
- package/dist/internal/index.js.map +1 -1
- package/dist/internal/index.mjs +17 -10
- package/dist/internal/index.mjs.map +1 -1
- package/package.json +3 -3
package/dist/internal/index.mjs
CHANGED
```diff
@@ -233,7 +233,7 @@ function getResponseMetadata({
   return {
     id: id != null ? id : void 0,
     modelId: model != null ? model : void 0,
-    timestamp: created
+    timestamp: created ? new Date(created * 1e3) : void 0
   };
 }
 
```
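This hunk changes how `getResponseMetadata` builds the `timestamp` field: the OpenAI API reports `created` as Unix seconds, so the new code multiplies by `1e3` before passing the value to the `Date` constructor and falls back to `undefined` when `created` is absent. A minimal sketch of that conversion (the helper name is hypothetical, not part of the package):

```ts
// Hypothetical helper mirroring the changed line in getResponseMetadata:
// the API reports `created` as Unix seconds, while the JavaScript Date
// constructor expects milliseconds, hence the `* 1e3`.
function toResponseTimestamp(created: number | null | undefined): Date | undefined {
  // Falsy values (null, undefined, 0) fall through to undefined, matching
  // the `created ? ... : void 0` guard in the diff.
  return created ? new Date(created * 1e3) : undefined;
}

// Example: toResponseTimestamp(1719360000) -> 2024-06-26T00:00:00.000Z (UTC)
```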
```diff
@@ -860,7 +860,7 @@ var OpenAIChatLanguageModel = class {
       outputTokens: void 0,
       totalTokens: void 0
     };
-    let
+    let metadataExtracted = false;
     let isActiveText = false;
     const providerMetadata = { openai: {} };
     return {
```
```diff
@@ -885,12 +885,15 @@ var OpenAIChatLanguageModel = class {
             controller.enqueue({ type: "error", error: value.error });
             return;
           }
-          if (
-
-
-
-
-
+          if (!metadataExtracted) {
+            const metadata = getResponseMetadata(value);
+            if (Object.values(metadata).some(Boolean)) {
+              metadataExtracted = true;
+              controller.enqueue({
+                type: "response-metadata",
+                ...getResponseMetadata(value)
+              });
+            }
           }
           if (value.usage != null) {
             usage.inputTokens = (_a = value.usage.prompt_tokens) != null ? _a : void 0;
```
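Together, the two chat-stream hunks above introduce a `metadataExtracted` flag and emit a single `response-metadata` stream part only once a streamed chunk actually carries usable metadata (id, modelId, or timestamp). A minimal sketch of that guard pattern, with simplified stand-in types and a hypothetical factory name (`createMetadataEmitter` is not a package export):

```ts
// Stand-in types for this sketch; the SDK's internal types are richer.
type ResponseMetadata = {
  id?: string;
  modelId?: string;
  timestamp?: Date;
};

type StreamPart = { type: "response-metadata" } & ResponseMetadata;

// Hypothetical factory isolating the guard introduced by the diff: emit
// "response-metadata" at most once, and only when a chunk yields at least
// one metadata field.
function createMetadataEmitter(
  getResponseMetadata: (chunk: unknown) => ResponseMetadata,
  enqueue: (part: StreamPart) => void
) {
  let metadataExtracted = false; // same flag the diff declares near line 863

  return (chunk: unknown): void => {
    if (metadataExtracted) return;
    const metadata = getResponseMetadata(chunk);
    // True iff id, modelId, or timestamp is set, so empty early chunks do
    // not consume the one-shot emission.
    if (Object.values(metadata).some(Boolean)) {
      metadataExtracted = true;
      enqueue({ type: "response-metadata", ...metadata });
    }
  };
}
```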
```diff
@@ -2934,12 +2937,16 @@ var TOP_LOGPROBS_MAX = 20;
 var openaiResponsesReasoningModelIds = [
   "o1",
   "o1-2024-12-17",
-  "o3-mini",
-  "o3-mini-2025-01-31",
   "o3",
   "o3-2025-04-16",
+  "o3-deep-research",
+  "o3-deep-research-2025-06-26",
+  "o3-mini",
+  "o3-mini-2025-01-31",
   "o4-mini",
   "o4-mini-2025-04-16",
+  "o4-mini-deep-research",
+  "o4-mini-deep-research-2025-06-26",
   "codex-mini-latest",
   "computer-use-preview",
   "gpt-5",
```
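The last hunk re-sorts `openaiResponsesReasoningModelIds` and adds the `o3-deep-research` and `o4-mini-deep-research` model IDs along with their dated snapshots. A sketch of how such a list could gate reasoning-specific handling follows; only the IDs visible in this hunk are reproduced (the list continues past `"gpt-5"` in the file), and `isReasoningModel` is a hypothetical helper for illustration, not the package's API:

```ts
// Partial mirror of the updated list from the diff (truncated at "gpt-5").
const openaiResponsesReasoningModelIds = [
  "o1",
  "o1-2024-12-17",
  "o3",
  "o3-2025-04-16",
  "o3-deep-research",
  "o3-deep-research-2025-06-26",
  "o3-mini",
  "o3-mini-2025-01-31",
  "o4-mini",
  "o4-mini-2025-04-16",
  "o4-mini-deep-research",
  "o4-mini-deep-research-2025-06-26",
  "codex-mini-latest",
  "computer-use-preview",
  "gpt-5"
] as const;

// Hypothetical helper: with this change, the deep-research variants are
// treated the same way as the other reasoning model IDs.
function isReasoningModel(modelId: string): boolean {
  return (openaiResponsesReasoningModelIds as readonly string[]).includes(modelId);
}

// isReasoningModel("o4-mini-deep-research") === true
```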