@ai-sdk/openai 2.0.69 → 2.0.70
This diff compares publicly released versions of the package as published to their public registry and is provided for informational purposes only.
- package/CHANGELOG.md +6 -0
- package/dist/index.js +18 -7
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +18 -7
- package/dist/index.mjs.map +1 -1
- package/dist/internal/index.js +17 -6
- package/dist/internal/index.js.map +1 -1
- package/dist/internal/index.mjs +17 -6
- package/dist/internal/index.mjs.map +1 -1
- package/package.json +3 -3
package/dist/internal/index.js
CHANGED
@@ -3948,6 +3948,7 @@ var OpenAIResponsesLanguageModel = class {
     const logprobs = [];
     let responseId = null;
     const ongoingToolCalls = {};
+    const ongoingAnnotations = [];
     let hasFunctionCall = false;
     const activeReasoning = {};
     let serviceTier;
@@ -4048,6 +4049,7 @@ var OpenAIResponsesLanguageModel = class {
               providerExecuted: true
             });
           } else if (value.item.type === "message") {
+            ongoingAnnotations.splice(0, ongoingAnnotations.length);
             controller.enqueue({
               type: "text-start",
               id: value.item.id,
@@ -4073,7 +4075,7 @@ var OpenAIResponsesLanguageModel = class {
               }
             });
           }
-        } else if (isResponseOutputItemDoneChunk(value)) {
+        } else if (isResponseOutputItemDoneChunk(value) && value.item.type !== "message") {
           if (value.item.type === "function_call") {
             ongoingToolCalls[value.output_index] = void 0;
             hasFunctionCall = true;
@@ -4183,11 +4185,6 @@ var OpenAIResponsesLanguageModel = class {
                 openai: { itemId: value.item.id }
               }
             });
-          } else if (value.item.type === "message") {
-            controller.enqueue({
-              type: "text-end",
-              id: value.item.id
-            });
           } else if (value.item.type === "reasoning") {
             const activeReasoningPart = activeReasoning[value.item.id];
             const summaryPartIndices = Object.entries(
@@ -4334,6 +4331,7 @@ var OpenAIResponsesLanguageModel = class {
            serviceTier = value.response.service_tier;
          }
        } else if (isResponseAnnotationAddedChunk(value)) {
+          ongoingAnnotations.push(value.annotation);
          if (value.annotation.type === "url_citation") {
            controller.enqueue({
              type: "source",
@@ -4359,6 +4357,19 @@ var OpenAIResponsesLanguageModel = class {
              } : {}
            });
          }
+        } else if (isResponseOutputItemDoneChunk(value) && value.item.type === "message") {
+          controller.enqueue({
+            type: "text-end",
+            id: value.item.id,
+            providerMetadata: {
+              openai: {
+                itemId: value.item.id,
+                ...ongoingAnnotations.length > 0 && {
+                  annotations: ongoingAnnotations
+                }
+              }
+            }
+          });
        } else if (isErrorChunk(value)) {
          controller.enqueue({ type: "error", error: value });
        }
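
In short: the stream transform now buffers annotation-added chunks in `ongoingAnnotations`, clears the buffer when a new message item starts, and attaches the collected annotations (together with the OpenAI item id) to the `text-end` stream part as `providerMetadata.openai.annotations`; previously the `text-end` part was emitted without provider metadata. Below is a minimal consumer-side sketch, assuming `streamText` from the `ai` package forwards `text-end` parts and their provider metadata unchanged; the model id, prompt, and web-search tool are illustrative assumptions, not part of this diff.

// Sketch only: reading the annotations that 2.0.70 attaches to the
// `text-end` stream part. Model id, prompt, and the web-search tool
// are illustrative; adjust to your setup.
import { openai } from '@ai-sdk/openai';
import { streamText } from 'ai';

const result = streamText({
  model: openai.responses('gpt-4o-mini'),
  // A tool that yields citations (e.g. web search) is what produces
  // url_citation annotations in the first place.
  tools: { web_search_preview: openai.tools.webSearchPreview({}) },
  prompt: 'Search the web for the latest AI SDK release and cite your sources.',
});

for await (const part of result.fullStream) {
  if (part.type === 'text-end') {
    // With 2.0.69 this metadata was absent; with 2.0.70 the itemId and any
    // annotations collected while the message streamed are expected under
    // providerMetadata.openai.
    console.log('text-end', part.id, part.providerMetadata?.openai);
  }
}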