@ai-sdk/openai 2.0.62 → 2.0.64
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +12 -0
- package/dist/index.js +18 -11
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +18 -11
- package/dist/index.mjs.map +1 -1
- package/dist/internal/index.js +17 -10
- package/dist/internal/index.js.map +1 -1
- package/dist/internal/index.mjs +17 -10
- package/dist/internal/index.mjs.map +1 -1
- package/package.json +1 -1
package/dist/index.mjs
CHANGED
|
@@ -228,7 +228,7 @@ function getResponseMetadata({
|
|
|
228
228
|
return {
|
|
229
229
|
id: id != null ? id : void 0,
|
|
230
230
|
modelId: model != null ? model : void 0,
|
|
231
|
-
timestamp: created != null ? new Date(created * 1e3) : void 0
|
|
231
|
+
timestamp: created ? new Date(created * 1e3) : void 0
|
|
232
232
|
};
|
|
233
233
|
}
|
|
234
234
|
|
|
@@ -861,7 +861,7 @@ var OpenAIChatLanguageModel = class {
|
|
|
861
861
|
outputTokens: void 0,
|
|
862
862
|
totalTokens: void 0
|
|
863
863
|
};
|
|
864
|
-
let isFirstChunk = true;
|
|
864
|
+
let metadataExtracted = false;
|
|
865
865
|
let isActiveText = false;
|
|
866
866
|
const providerMetadata = { openai: {} };
|
|
867
867
|
return {
|
|
@@ -886,12 +886,15 @@ var OpenAIChatLanguageModel = class {
|
|
|
886
886
|
controller.enqueue({ type: "error", error: value.error });
|
|
887
887
|
return;
|
|
888
888
|
}
|
|
889
|
-
if (isFirstChunk) {
|
|
890
|
-
isFirstChunk = false;
|
|
891
|
-
controller.enqueue({
|
|
892
|
-
type: "response-metadata",
|
|
893
|
-
...getResponseMetadata(value)
|
|
894
|
-
});
|
|
889
|
+
if (!metadataExtracted) {
|
|
890
|
+
const metadata = getResponseMetadata(value);
|
|
891
|
+
if (Object.values(metadata).some(Boolean)) {
|
|
892
|
+
metadataExtracted = true;
|
|
893
|
+
controller.enqueue({
|
|
894
|
+
type: "response-metadata",
|
|
895
|
+
...getResponseMetadata(value)
|
|
896
|
+
});
|
|
897
|
+
}
|
|
895
898
|
}
|
|
896
899
|
if (value.usage != null) {
|
|
897
900
|
usage.inputTokens = (_a = value.usage.prompt_tokens) != null ? _a : void 0;
|
|
@@ -2891,12 +2894,16 @@ var TOP_LOGPROBS_MAX = 20;
|
|
|
2891
2894
|
var openaiResponsesReasoningModelIds = [
|
|
2892
2895
|
"o1",
|
|
2893
2896
|
"o1-2024-12-17",
|
|
2894
|
-
"o3-mini",
|
|
2895
|
-
"o3-mini-2025-01-31",
|
|
2896
2897
|
"o3",
|
|
2897
2898
|
"o3-2025-04-16",
|
|
2899
|
+
"o3-deep-research",
|
|
2900
|
+
"o3-deep-research-2025-06-26",
|
|
2901
|
+
"o3-mini",
|
|
2902
|
+
"o3-mini-2025-01-31",
|
|
2898
2903
|
"o4-mini",
|
|
2899
2904
|
"o4-mini-2025-04-16",
|
|
2905
|
+
"o4-mini-deep-research",
|
|
2906
|
+
"o4-mini-deep-research-2025-06-26",
|
|
2900
2907
|
"codex-mini-latest",
|
|
2901
2908
|
"computer-use-preview",
|
|
2902
2909
|
"gpt-5",
|
|
@@ -4544,7 +4551,7 @@ var OpenAITranscriptionModel = class {
|
|
|
4544
4551
|
};
|
|
4545
4552
|
|
|
4546
4553
|
// src/version.ts
|
|
4547
|
-
var VERSION = true ? "2.0.62" : "0.0.0-test";
|
|
4554
|
+
var VERSION = true ? "2.0.64" : "0.0.0-test";
|
|
4548
4555
|
|
|
4549
4556
|
// src/openai-provider.ts
|
|
4550
4557
|
function createOpenAI(options = {}) {
|