@ai-sdk/openai 2.0.68 → 2.0.70
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +12 -0
- package/dist/index.js +59 -26
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +59 -26
- package/dist/index.mjs.map +1 -1
- package/dist/internal/index.js +58 -25
- package/dist/internal/index.js.map +1 -1
- package/dist/internal/index.mjs +58 -25
- package/dist/internal/index.mjs.map +1 -1
- package/package.json +1 -1
package/dist/internal/index.js
CHANGED
@@ -2715,10 +2715,13 @@ var openaiResponsesChunkSchema = (0, import_provider_utils21.lazyValidator)(
       }),
       import_v414.z.object({
         type: import_v414.z.literal("error"),
-
-
-
-
+        sequence_number: import_v414.z.number(),
+        error: import_v414.z.object({
+          type: import_v414.z.string(),
+          code: import_v414.z.string(),
+          message: import_v414.z.string(),
+          param: import_v414.z.string().nullish()
+        })
       }),
       import_v414.z.object({ type: import_v414.z.string() }).loose().transform((value) => ({
         type: "unknown_chunk",
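
For reference, a minimal sketch of the updated error-chunk shape, restated with plain `zod` instead of the bundled `import_v414` alias; the sample payload values are illustrative and not taken from the package:

```ts
import { z } from "zod";

// Restatement of the updated error chunk member of openaiResponsesChunkSchema.
const errorChunkSchema = z.object({
  type: z.literal("error"),
  sequence_number: z.number(),
  error: z.object({
    type: z.string(),
    code: z.string(),
    message: z.string(),
    param: z.string().nullish()
  })
});

// Illustrative event in the shape the schema now expects (values are hypothetical).
errorChunkSchema.parse({
  type: "error",
  sequence_number: 7,
  error: {
    type: "invalid_request_error",
    code: "invalid_value",
    message: "Unsupported value.",
    param: null
  }
});
```
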
@@ -2731,13 +2734,15 @@ var openaiResponsesChunkSchema = (0, import_provider_utils21.lazyValidator)(
 var openaiResponsesResponseSchema = (0, import_provider_utils21.lazyValidator)(
   () => (0, import_provider_utils21.zodSchema)(
     import_v414.z.object({
-      id: import_v414.z.string(),
-      created_at: import_v414.z.number(),
+      id: import_v414.z.string().optional(),
+      created_at: import_v414.z.number().optional(),
       error: import_v414.z.object({
-
-
+        message: import_v414.z.string(),
+        type: import_v414.z.string(),
+        param: import_v414.z.string().nullish(),
+        code: import_v414.z.string()
       }).nullish(),
-      model: import_v414.z.string(),
+      model: import_v414.z.string().optional(),
       output: import_v414.z.array(
         import_v414.z.discriminatedUnion("type", [
           import_v414.z.object({
@@ -2779,7 +2784,18 @@ var openaiResponsesResponseSchema = (0, import_provider_utils21.lazyValidator)(
             quote: import_v414.z.string().nullish()
           }),
           import_v414.z.object({
-            type: import_v414.z.literal("container_file_citation")
+            type: import_v414.z.literal("container_file_citation"),
+            container_id: import_v414.z.string(),
+            file_id: import_v414.z.string(),
+            filename: import_v414.z.string().nullish(),
+            start_index: import_v414.z.number().nullish(),
+            end_index: import_v414.z.number().nullish(),
+            index: import_v414.z.number().nullish()
+          }),
+          import_v414.z.object({
+            type: import_v414.z.literal("file_path"),
+            file_id: import_v414.z.string(),
+            index: import_v414.z.number().nullish()
           })
         ])
       )
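
A hedged sketch of the two new annotation variants, restated with plain `zod`; the IDs and indices in the parsed samples are made up for illustration:

```ts
import { z } from "zod";

// Plain-zod restatement of the two new annotation union members.
const containerFileCitation = z.object({
  type: z.literal("container_file_citation"),
  container_id: z.string(),
  file_id: z.string(),
  filename: z.string().nullish(),
  start_index: z.number().nullish(),
  end_index: z.number().nullish(),
  index: z.number().nullish()
});

const filePath = z.object({
  type: z.literal("file_path"),
  file_id: z.string(),
  index: z.number().nullish()
});

// Illustrative payloads (hypothetical IDs and offsets).
containerFileCitation.parse({
  type: "container_file_citation",
  container_id: "cntr_123",
  file_id: "cfile_456",
  filename: "report.csv",
  start_index: 120,
  end_index: 154,
  index: 0
});

filePath.parse({ type: "file_path", file_id: "file_789", index: 1 });
```
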
@@ -2818,7 +2834,10 @@ var openaiResponsesResponseSchema = (0, import_provider_utils21.lazyValidator)(
             queries: import_v414.z.array(import_v414.z.string()),
             results: import_v414.z.array(
               import_v414.z.object({
-                attributes: import_v414.z.record(
+                attributes: import_v414.z.record(
+                  import_v414.z.string(),
+                  import_v414.z.union([import_v414.z.string(), import_v414.z.number(), import_v414.z.boolean()])
+                ),
                 file_id: import_v414.z.string(),
                 filename: import_v414.z.string(),
                 score: import_v414.z.number(),
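
The `attributes` field now passes the key schema to `z.record` explicitly alongside the union value schema. A minimal sketch of the resulting validator with illustrative sample data:

```ts
import { z } from "zod";

// Explicit string key schema plus a string/number/boolean value union,
// matching the updated `attributes` field.
const attributes = z.record(
  z.string(),
  z.union([z.string(), z.number(), z.boolean()])
);

// Accepts string keys with string, number, or boolean values (sample data is illustrative).
attributes.parse({ author: "jane", page: 12, draft: false });
```
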
@@ -2880,7 +2899,7 @@ var openaiResponsesResponseSchema = (0, import_provider_utils21.lazyValidator)(
             )
           })
         ])
-      ),
+      ).optional(),
       service_tier: import_v414.z.string().nullish(),
       incomplete_details: import_v414.z.object({ reason: import_v414.z.string() }).nullish(),
       usage: import_v414.z.object({
@@ -2888,7 +2907,7 @@ var openaiResponsesResponseSchema = (0, import_provider_utils21.lazyValidator)(
         input_tokens_details: import_v414.z.object({ cached_tokens: import_v414.z.number().nullish() }).nullish(),
         output_tokens: import_v414.z.number(),
         output_tokens_details: import_v414.z.object({ reasoning_tokens: import_v414.z.number().nullish() }).nullish()
-      })
+      }).optional()
     })
   )
 );
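
Taken together, these response-schema changes make `id`, `created_at`, `model`, `output`, and `usage` optional and give `error` a concrete shape, so the fields shown in this diff no longer demand a successful response body. A minimal sketch of the relaxed top-level fields, restated with plain `zod` and omitting `output`/`usage` for brevity; the parsed values are illustrative:

```ts
import { z } from "zod";

// Partial restatement of the relaxed top-level response fields.
const responseShape = z.object({
  id: z.string().optional(),
  created_at: z.number().optional(),
  model: z.string().optional(),
  error: z.object({
    message: z.string(),
    type: z.string(),
    param: z.string().nullish(),
    code: z.string()
  }).nullish()
});

// A body carrying only a structured error now satisfies these fields
// (error contents here are hypothetical).
responseShape.parse({
  error: {
    message: "The server had an error while processing your request.",
    type: "server_error",
    param: null,
    code: "internal_error"
  }
});
```
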
@@ -3859,7 +3878,9 @@ var OpenAIResponsesLanguageModel = class {
         }
       }
       const providerMetadata = {
-        openai: {
+        openai: {
+          ...response.id != null ? { responseId: response.id } : {}
+        }
       };
       if (logprobs.length > 0) {
         providerMetadata.openai.logprobs = logprobs;
@@ -3867,6 +3888,7 @@ var OpenAIResponsesLanguageModel = class {
       if (typeof response.service_tier === "string") {
         providerMetadata.openai.serviceTier = response.service_tier;
       }
+      const usage = response.usage;
       return {
         content,
         finishReason: mapOpenAIResponseFinishReason({
@@ -3874,11 +3896,11 @@ var OpenAIResponsesLanguageModel = class {
           hasFunctionCall
         }),
         usage: {
-          inputTokens:
-          outputTokens:
-          totalTokens:
-          reasoningTokens: (_q = (_p =
-          cachedInputTokens: (_s = (_r =
+          inputTokens: usage.input_tokens,
+          outputTokens: usage.output_tokens,
+          totalTokens: usage.input_tokens + usage.output_tokens,
+          reasoningTokens: (_q = (_p = usage.output_tokens_details) == null ? void 0 : _p.reasoning_tokens) != null ? _q : void 0,
+          cachedInputTokens: (_s = (_r = usage.input_tokens_details) == null ? void 0 : _r.cached_tokens) != null ? _s : void 0
         },
         request: { body },
         response: {
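
The transpiled `(_q = (_p = ...))` chains are just optional chaining with nullish coalescing. An untranspiled sketch of the new usage mapping, assuming `usage` is present as the surrounding code expects:

```ts
// Untranspiled equivalent of the new usage mapping.
function mapUsage(usage: {
  input_tokens: number;
  output_tokens: number;
  input_tokens_details?: { cached_tokens?: number | null } | null;
  output_tokens_details?: { reasoning_tokens?: number | null } | null;
}) {
  return {
    inputTokens: usage.input_tokens,
    outputTokens: usage.output_tokens,
    totalTokens: usage.input_tokens + usage.output_tokens,
    // null or missing detail objects fall back to undefined, matching the compiled checks
    reasoningTokens: usage.output_tokens_details?.reasoning_tokens ?? undefined,
    cachedInputTokens: usage.input_tokens_details?.cached_tokens ?? undefined
  };
}
```
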
@@ -3926,6 +3948,7 @@ var OpenAIResponsesLanguageModel = class {
       const logprobs = [];
       let responseId = null;
       const ongoingToolCalls = {};
+      const ongoingAnnotations = [];
       let hasFunctionCall = false;
       const activeReasoning = {};
       let serviceTier;
@@ -4026,6 +4049,7 @@ var OpenAIResponsesLanguageModel = class {
              providerExecuted: true
            });
          } else if (value.item.type === "message") {
+            ongoingAnnotations.splice(0, ongoingAnnotations.length);
            controller.enqueue({
              type: "text-start",
              id: value.item.id,
@@ -4051,7 +4075,7 @@ var OpenAIResponsesLanguageModel = class {
              }
            });
          }
-        } else if (isResponseOutputItemDoneChunk(value)) {
+        } else if (isResponseOutputItemDoneChunk(value) && value.item.type !== "message") {
          if (value.item.type === "function_call") {
            ongoingToolCalls[value.output_index] = void 0;
            hasFunctionCall = true;
@@ -4161,11 +4185,6 @@ var OpenAIResponsesLanguageModel = class {
              openai: { itemId: value.item.id }
            }
          });
-        } else if (value.item.type === "message") {
-          controller.enqueue({
-            type: "text-end",
-            id: value.item.id
-          });
        } else if (value.item.type === "reasoning") {
          const activeReasoningPart = activeReasoning[value.item.id];
          const summaryPartIndices = Object.entries(
@@ -4312,6 +4331,7 @@ var OpenAIResponsesLanguageModel = class {
            serviceTier = value.response.service_tier;
          }
        } else if (isResponseAnnotationAddedChunk(value)) {
+          ongoingAnnotations.push(value.annotation);
          if (value.annotation.type === "url_citation") {
            controller.enqueue({
              type: "source",
@@ -4337,6 +4357,19 @@ var OpenAIResponsesLanguageModel = class {
              } : {}
            });
          }
+        } else if (isResponseOutputItemDoneChunk(value) && value.item.type === "message") {
+          controller.enqueue({
+            type: "text-end",
+            id: value.item.id,
+            providerMetadata: {
+              openai: {
+                itemId: value.item.id,
+                ...ongoingAnnotations.length > 0 && {
+                  annotations: ongoingAnnotations
+                }
+              }
+            }
+          });
        } else if (isErrorChunk(value)) {
          controller.enqueue({ type: "error", error: value });
        }
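
The streaming changes buffer annotations per message: `ongoingAnnotations` is cleared when a message item starts, each annotation-added chunk pushes onto it, and the buffered list is attached to the `text-end` part's `providerMetadata.openai.annotations` when the message item completes. A standalone sketch of that buffer-and-flush pattern, with simplified types that are not the package's own:

```ts
// Standalone sketch of the buffer-and-flush pattern used for message annotations.
type Annotation = { type: string; [key: string]: unknown };

const ongoingAnnotations: Annotation[] = [];

// Reset the buffer when a new message item begins (mirrors the splice on text-start).
function onMessageStart(): void {
  ongoingAnnotations.splice(0, ongoingAnnotations.length);
}

// Collect every annotation streamed for the current message.
function onAnnotationAdded(annotation: Annotation): void {
  ongoingAnnotations.push(annotation);
}

// On message completion, emit a text-end part carrying the buffered annotations.
function onMessageDone(itemId: string) {
  return {
    type: "text-end" as const,
    id: itemId,
    providerMetadata: {
      openai: {
        itemId,
        // attach a snapshot of the buffered annotations only when any were seen
        ...(ongoingAnnotations.length > 0 && { annotations: [...ongoingAnnotations] })
      }
    }
  };
}
```
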
|