@ai-sdk/openai 2.0.69 → 2.0.71
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +12 -0
- package/dist/index.js +22 -9
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +22 -9
- package/dist/index.mjs.map +1 -1
- package/dist/internal/index.d.mts +84 -1
- package/dist/internal/index.d.ts +84 -1
- package/dist/internal/index.js +29 -10
- package/dist/internal/index.js.map +1 -1
- package/dist/internal/index.mjs +25 -9
- package/dist/internal/index.mjs.map +1 -1
- package/package.json +3 -3
package/CHANGELOG.md
CHANGED
@@ -1,5 +1,17 @@
 # @ai-sdk/openai
 
+## 2.0.71
+
+### Patch Changes
+
+- 29a5595: Azure OpenAI enabled web-search-preview
+
+## 2.0.70
+
+### Patch Changes
+
+- dafda29: Set the annotations from the Responses API to doStream
+
 ## 2.0.69
 
 ### Patch Changes
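The two new entries map onto the `doStream` changes further down: 2.0.70 attaches Responses API annotations to the streamed text, and 2.0.71 lets the web-search-preview tool be used when the Responses model runs behind Azure OpenAI. A minimal sketch of the Azure side follows; the resource name, deployment name, and tool wiring are illustrative assumptions rather than anything confirmed by this diff.

```ts
import { createAzure } from '@ai-sdk/azure';
import { openai } from '@ai-sdk/openai';
import { streamText } from 'ai';

// Assumption: an Azure OpenAI resource with a Responses-capable deployment.
const azure = createAzure({
  resourceName: 'my-resource',
  apiKey: process.env.AZURE_API_KEY,
});

const result = streamText({
  model: azure.responses('gpt-4o'), // assumption: deployment name
  tools: {
    // assumption: reusing the OpenAI web-search-preview tool factory,
    // which 2.0.71 should now route correctly for Azure OpenAI
    web_search_preview: openai.tools.webSearchPreview({}),
  },
  prompt: 'What is new in the AI SDK? Cite sources.',
});

for await (const part of result.fullStream) {
  if (part.type === 'source') {
    console.log('citation:', part); // url_citation annotations surface as source parts
  }
}
```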
package/dist/index.js
CHANGED
@@ -2616,6 +2616,8 @@ var openaiResponsesChunkSchema = (0, import_provider_utils21.lazyValidator)(
       annotation: import_v416.z.discriminatedUnion("type", [
         import_v416.z.object({
           type: import_v416.z.literal("url_citation"),
+          start_index: import_v416.z.number(),
+          end_index: import_v416.z.number(),
           url: import_v416.z.string(),
           title: import_v416.z.string()
         }),
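With the two added fields, every streamed `url_citation` annotation is expected to carry character offsets alongside the URL and title. An illustrative object that would satisfy the updated validator (the concrete values are invented):

```ts
// Shape implied by the updated url_citation schema; concrete values are invented.
const annotation = {
  type: 'url_citation' as const,
  start_index: 120, // character offset where the cited span starts
  end_index: 164,   // character offset where the cited span ends
  url: 'https://example.com/article',
  title: 'Example article',
};
```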
@@ -3647,6 +3649,7 @@ var OpenAIResponsesLanguageModel = class {
     const logprobs = [];
     let responseId = null;
     const ongoingToolCalls = {};
+    const ongoingAnnotations = [];
     let hasFunctionCall = false;
     const activeReasoning = {};
     let serviceTier;
@@ -3696,7 +3699,7 @@ var OpenAIResponsesLanguageModel = class {
           controller.enqueue({
             type: "tool-call",
             toolCallId: value.item.id,
-            toolName: "web_search",
+            toolName: webSearchToolName != null ? webSearchToolName : "web_search",
             input: JSON.stringify({}),
             providerExecuted: true
           });
@@ -3747,6 +3750,7 @@ var OpenAIResponsesLanguageModel = class {
             providerExecuted: true
           });
         } else if (value.item.type === "message") {
+          ongoingAnnotations.splice(0, ongoingAnnotations.length);
           controller.enqueue({
             type: "text-start",
             id: value.item.id,
@@ -3772,7 +3776,7 @@ var OpenAIResponsesLanguageModel = class {
             }
           });
         }
-      } else if (isResponseOutputItemDoneChunk(value)) {
+      } else if (isResponseOutputItemDoneChunk(value) && value.item.type !== "message") {
         if (value.item.type === "function_call") {
           ongoingToolCalls[value.output_index] = void 0;
           hasFunctionCall = true;
@@ -3796,7 +3800,7 @@ var OpenAIResponsesLanguageModel = class {
           controller.enqueue({
             type: "tool-result",
            toolCallId: value.item.id,
-            toolName: "web_search",
+            toolName: webSearchToolName != null ? webSearchToolName : "web_search",
             result: mapWebSearchOutput(value.item.action),
             providerExecuted: true
           });
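The two `toolName` hunks above replace the hard-coded `"web_search"` with the configured web search tool name, keeping `"web_search"` only as a fallback, so downstream `tool-call` and `tool-result` parts report whichever tool the caller registered. A hedged consumer sketch; the `'web_search_preview'` alternative is an assumption, since the diff only shows the fallback value:

```ts
import { streamText } from 'ai';

// Hedged sketch: surface provider-executed web search parts from a
// streamText result. The 'web_search_preview' name is an assumption;
// the diff itself only shows a fallback to 'web_search'.
async function logWebSearchParts(result: ReturnType<typeof streamText>) {
  for await (const part of result.fullStream) {
    if (
      (part.type === 'tool-call' || part.type === 'tool-result') &&
      (part.toolName === 'web_search' || part.toolName === 'web_search_preview')
    ) {
      console.log(part.type, part);
    }
  }
}
```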
@@ -3882,11 +3886,6 @@ var OpenAIResponsesLanguageModel = class {
               openai: { itemId: value.item.id }
             }
           });
-        } else if (value.item.type === "message") {
-          controller.enqueue({
-            type: "text-end",
-            id: value.item.id
-          });
         } else if (value.item.type === "reasoning") {
           const activeReasoningPart = activeReasoning[value.item.id];
           const summaryPartIndices = Object.entries(
@@ -4033,6 +4032,7 @@ var OpenAIResponsesLanguageModel = class {
           serviceTier = value.response.service_tier;
         }
       } else if (isResponseAnnotationAddedChunk(value)) {
+        ongoingAnnotations.push(value.annotation);
         if (value.annotation.type === "url_citation") {
           controller.enqueue({
             type: "source",
@@ -4058,6 +4058,19 @@ var OpenAIResponsesLanguageModel = class {
             } : {}
           });
         }
+      } else if (isResponseOutputItemDoneChunk(value) && value.item.type === "message") {
+        controller.enqueue({
+          type: "text-end",
+          id: value.item.id,
+          providerMetadata: {
+            openai: {
+              itemId: value.item.id,
+              ...ongoingAnnotations.length > 0 && {
+                annotations: ongoingAnnotations
+              }
+            }
+          }
+        });
       } else if (isErrorChunk(value)) {
         controller.enqueue({ type: "error", error: value });
       }
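Taken together, these hunks defer a message's `text-end` part until its output item completes and attach the annotations collected in the meantime under `providerMetadata.openai.annotations`, alongside the `itemId`. That is what the 2.0.70 changelog entry refers to. A sketch of reading the metadata from the full stream, assuming an OpenAI Responses model and that the part shape shown above surfaces unchanged:

```ts
import { openai } from '@ai-sdk/openai';
import { streamText } from 'ai';

// Hedged sketch: read annotations attached to the text-end part's provider
// metadata. Model name and tool wiring are assumptions for illustration.
const result = streamText({
  model: openai.responses('gpt-4o'),
  tools: { web_search_preview: openai.tools.webSearchPreview({}) },
  prompt: 'Summarize recent AI SDK releases and cite sources.',
});

for await (const part of result.fullStream) {
  if (part.type === 'text-end') {
    const meta = part.providerMetadata?.openai;
    if (meta?.annotations) {
      console.log('annotations for', meta.itemId, meta.annotations);
    }
  }
}
```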
@@ -4515,7 +4528,7 @@ var OpenAITranscriptionModel = class {
 };
 
 // src/version.ts
-var VERSION = true ? "2.0.69" : "0.0.0-test";
+var VERSION = true ? "2.0.71" : "0.0.0-test";
 
 // src/openai-provider.ts
 function createOpenAI(options = {}) {