@ai-sdk/openai 2.0.19 → 2.0.21
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +19 -0
- package/dist/index.js +17 -13
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +17 -13
- package/dist/index.mjs.map +1 -1
- package/dist/internal/index.js +17 -13
- package/dist/internal/index.js.map +1 -1
- package/dist/internal/index.mjs +17 -13
- package/dist/internal/index.mjs.map +1 -1
- package/package.json +2 -2
package/dist/index.mjs
CHANGED
@@ -430,7 +430,7 @@ var webSearchPreview = createProviderDefinedToolFactory2({
     action: z4.discriminatedUnion("type", [
       z4.object({
         type: z4.literal("search"),
-        query: z4.string()
+        query: z4.string().nullish()
       }),
       z4.object({
         type: z4.literal("open_page"),
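Both this hunk and the identical one at 2174–2177 relax the web-search `action` schema so that a `search` action may arrive without a `query`. A minimal sketch of the difference using zod directly (`z4`/`z13` are just the bundler's aliases for zod; the sample event is hypothetical):

```ts
import { z } from "zod";

// Before: a "search" action had to carry a string query.
const searchActionOld = z.object({
  type: z.literal("search"),
  query: z.string(),
});

// After: query may also be null or missing.
const searchActionNew = z.object({
  type: z.literal("search"),
  query: z.string().nullish(),
});

const event = { type: "search" }; // hypothetical web_search_call action without a query

console.log(searchActionOld.safeParse(event).success); // false
console.log(searchActionNew.safeParse(event).success); // true
```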
@@ -2174,7 +2174,7 @@ var webSearchCallItem = z13.object({
     action: z13.discriminatedUnion("type", [
       z13.object({
         type: z13.literal("search"),
-        query: z13.string()
+        query: z13.string().nullish()
       }),
       z13.object({
         type: z13.literal("open_page"),
@@ -2702,7 +2702,7 @@ var OpenAIResponsesLanguageModel = class {
         controller.enqueue({ type: "stream-start", warnings });
       },
       transform(chunk, controller) {
-        var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p;
+        var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p, _q, _r;
         if (options.includeRawChunks) {
           controller.enqueue({ type: "raw", rawValue: chunk.rawValue });
         }
@@ -2913,12 +2913,12 @@ var OpenAIResponsesLanguageModel = class {
             id: value.item_id,
             delta: value.delta
           });
-          if (value.logprobs) {
+          if (((_d = (_c = options.providerOptions) == null ? void 0 : _c.openai) == null ? void 0 : _d.logprobs) && value.logprobs) {
             logprobs.push(value.logprobs);
           }
         } else if (isResponseReasoningSummaryPartAddedChunk(value)) {
           if (value.summary_index > 0) {
-            (
+            (_e = activeReasoning[value.item_id]) == null ? void 0 : _e.summaryParts.push(
              value.summary_index
            );
            controller.enqueue({
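This hunk gates logprobs collection on the caller having requested them through `providerOptions.openai.logprobs`, rather than pushing them whenever a delta chunk happens to carry them. A minimal opt-in sketch with the AI SDK's `streamText`; the exact value accepted by the `logprobs` option (boolean vs. top-k count) is an assumption here, so check the provider docs:

```ts
import { streamText } from "ai";
import { openai } from "@ai-sdk/openai";

// Sketch: logprobs are only accumulated when explicitly requested.
const result = streamText({
  model: openai("gpt-4o-mini"),
  prompt: "Say hello",
  providerOptions: {
    openai: { logprobs: true }, // assumption: shape per the provider's documented options
  },
});

for await (const textPart of result.textStream) {
  process.stdout.write(textPart);
}
```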
@@ -2927,7 +2927,7 @@ var OpenAIResponsesLanguageModel = class {
               providerMetadata: {
                 openai: {
                   itemId: value.item_id,
-                  reasoningEncryptedContent: (
+                  reasoningEncryptedContent: (_g = (_f = activeReasoning[value.item_id]) == null ? void 0 : _f.encryptedContent) != null ? _g : null
                 }
               }
             });
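The reasoning-summary changes are transpiled optional chaining. A readability sketch of what the de-sugared lines do (the helper and types below are illustrative, not the shipped source):

```ts
// Illustrative de-sugaring of the new optional-chained expressions.
type ActiveReasoning = Record<
  string,
  { summaryParts: number[]; encryptedContent?: string | null }
>;

function onSummaryPartAdded(
  activeReasoning: ActiveReasoning,
  itemId: string,
  summaryIndex: number,
): string | null {
  // Track the new summary part on the active reasoning item, if it is still known.
  activeReasoning[itemId]?.summaryParts.push(summaryIndex);
  // The encrypted reasoning content (or null) is surfaced in provider metadata.
  return activeReasoning[itemId]?.encryptedContent ?? null;
}
```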
@@ -2945,20 +2945,20 @@ var OpenAIResponsesLanguageModel = class {
           });
         } else if (isResponseFinishedChunk(value)) {
           finishReason = mapOpenAIResponseFinishReason({
-            finishReason: (
+            finishReason: (_h = value.response.incomplete_details) == null ? void 0 : _h.reason,
             hasToolCalls
           });
           usage.inputTokens = value.response.usage.input_tokens;
           usage.outputTokens = value.response.usage.output_tokens;
           usage.totalTokens = value.response.usage.input_tokens + value.response.usage.output_tokens;
-          usage.reasoningTokens = (
-          usage.cachedInputTokens = (
+          usage.reasoningTokens = (_j = (_i = value.response.usage.output_tokens_details) == null ? void 0 : _i.reasoning_tokens) != null ? _j : void 0;
+          usage.cachedInputTokens = (_l = (_k = value.response.usage.input_tokens_details) == null ? void 0 : _k.cached_tokens) != null ? _l : void 0;
         } else if (isResponseAnnotationAddedChunk(value)) {
           if (value.annotation.type === "url_citation") {
             controller.enqueue({
               type: "source",
               sourceType: "url",
-              id: (
+              id: (_o = (_n = (_m = self.config).generateId) == null ? void 0 : _n.call(_m)) != null ? _o : generateId2(),
               url: value.annotation.url,
               title: value.annotation.title
             });
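In the finish-chunk branch the new lines read the finish reason from `incomplete_details` and the token counts from the optional `*_tokens_details` objects of the Responses API usage payload. A de-sugared sketch of the mapping (the `mapUsage` helper and its input type are illustrative; the field names come from the diff):

```ts
// Illustrative de-sugaring of the new usage mapping.
type ResponsesUsage = {
  input_tokens: number;
  output_tokens: number;
  output_tokens_details?: { reasoning_tokens?: number };
  input_tokens_details?: { cached_tokens?: number };
};

function mapUsage(usage: ResponsesUsage) {
  return {
    inputTokens: usage.input_tokens,
    outputTokens: usage.output_tokens,
    totalTokens: usage.input_tokens + usage.output_tokens,
    reasoningTokens: usage.output_tokens_details?.reasoning_tokens ?? undefined,
    cachedInputTokens: usage.input_tokens_details?.cached_tokens ?? undefined,
  };
}
```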
@@ -2966,7 +2966,7 @@ var OpenAIResponsesLanguageModel = class {
             controller.enqueue({
               type: "source",
               sourceType: "document",
-              id: (
+              id: (_r = (_q = (_p = self.config).generateId) == null ? void 0 : _q.call(_p)) != null ? _r : generateId2(),
               mediaType: "text/plain",
               title: value.annotation.quote,
               filename: value.annotation.file_id
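Both annotation branches now take the source part's id from the configurable `config.generateId` when the caller supplied one, falling back to the SDK's default generator (`generateId2` is the bundler alias). De-sugared, the expression is roughly the following sketch; the `sourceId` helper is hypothetical:

```ts
// Illustrative de-sugaring of the id expression in both annotation branches.
function sourceId(
  config: { generateId?: () => string },
  defaultGenerateId: () => string,
): string {
  return config.generateId?.() ?? defaultGenerateId();
}
```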
@@ -3520,8 +3520,12 @@ var OpenAITranscriptionModel = class {
       include: openAIOptions.include,
       language: openAIOptions.language,
       prompt: openAIOptions.prompt,
-      response_format
-      //
+      // https://platform.openai.com/docs/api-reference/audio/createTranscription#audio_createtranscription-response_format
+      // prefer verbose_json to get segments for models that support it
+      response_format: [
+        "gpt-4o-transcribe",
+        "gpt-4o-mini-transcribe"
+      ].includes(this.modelId) ? "json" : "verbose_json",
       temperature: openAIOptions.temperature,
       timestamp_granularities: openAIOptions.timestampGranularities
     };
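The transcription hunk makes `response_format` model-dependent: `gpt-4o-transcribe` and `gpt-4o-mini-transcribe` get plain `json` (they do not accept `verbose_json`), while other models such as `whisper-1` get `verbose_json` so segment timestamps come back. A minimal usage sketch with the AI SDK's transcription helper; the model choice and audio file are assumptions for illustration:

```ts
import { readFile } from "node:fs/promises";
import { experimental_transcribe as transcribe } from "ai";
import { openai } from "@ai-sdk/openai";

// whisper-1 is sent response_format: "verbose_json" under the hood, so segments
// are populated; gpt-4o-transcribe / gpt-4o-mini-transcribe fall back to "json".
const result = await transcribe({
  model: openai.transcription("whisper-1"),
  audio: await readFile("./sample.wav"), // hypothetical local file
});

console.log(result.text);
console.log(result.segments); // per-segment start/end timestamps on verbose_json models
```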