@ai-sdk/openai 2.0.69 → 2.0.70
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +6 -0
- package/dist/index.js +18 -7
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +18 -7
- package/dist/index.mjs.map +1 -1
- package/dist/internal/index.js +17 -6
- package/dist/internal/index.js.map +1 -1
- package/dist/internal/index.mjs +17 -6
- package/dist/internal/index.mjs.map +1 -1
- package/package.json +3 -3
package/dist/internal/index.mjs
CHANGED
@@ -4012,6 +4012,7 @@ var OpenAIResponsesLanguageModel = class {
     const logprobs = [];
     let responseId = null;
     const ongoingToolCalls = {};
+    const ongoingAnnotations = [];
     let hasFunctionCall = false;
     const activeReasoning = {};
     let serviceTier;
@@ -4112,6 +4113,7 @@ var OpenAIResponsesLanguageModel = class {
               providerExecuted: true
             });
           } else if (value.item.type === "message") {
+            ongoingAnnotations.splice(0, ongoingAnnotations.length);
             controller.enqueue({
               type: "text-start",
               id: value.item.id,
@@ -4137,7 +4139,7 @@ var OpenAIResponsesLanguageModel = class {
               }
             });
           }
-        } else if (isResponseOutputItemDoneChunk(value)) {
+        } else if (isResponseOutputItemDoneChunk(value) && value.item.type !== "message") {
           if (value.item.type === "function_call") {
             ongoingToolCalls[value.output_index] = void 0;
             hasFunctionCall = true;
@@ -4247,11 +4249,6 @@ var OpenAIResponsesLanguageModel = class {
                 openai: { itemId: value.item.id }
               }
             });
-          } else if (value.item.type === "message") {
-            controller.enqueue({
-              type: "text-end",
-              id: value.item.id
-            });
           } else if (value.item.type === "reasoning") {
             const activeReasoningPart = activeReasoning[value.item.id];
             const summaryPartIndices = Object.entries(
@@ -4398,6 +4395,7 @@ var OpenAIResponsesLanguageModel = class {
             serviceTier = value.response.service_tier;
           }
         } else if (isResponseAnnotationAddedChunk(value)) {
+          ongoingAnnotations.push(value.annotation);
           if (value.annotation.type === "url_citation") {
             controller.enqueue({
               type: "source",
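Together with the reset added in the text-start hunk above and the read in the final hunk below, this push gives ongoingAnnotations a simple per-message lifecycle: cleared in place when a message output item starts (splice rather than reassignment, because the binding is const), appended to for each annotation chunk, and consumed when the item completes. A minimal standalone TypeScript sketch of that lifecycle follows; the chunk shapes, kinds, and ids are illustrative placeholders, not names taken from the package.

// Hypothetical, simplified chunk stream illustrating the buffer lifecycle.
type Annotation = { type: string; [key: string]: unknown };
type Chunk =
  | { kind: "message-start"; itemId: string }
  | { kind: "annotation"; annotation: Annotation }
  | { kind: "message-done"; itemId: string };

const ongoingAnnotations: Annotation[] = [];

function handleChunk(chunk: Chunk): void {
  if (chunk.kind === "message-start") {
    // Reset in place, mirroring ongoingAnnotations.splice(0, ongoingAnnotations.length).
    ongoingAnnotations.splice(0, ongoingAnnotations.length);
  } else if (chunk.kind === "annotation") {
    // Buffer every annotation chunk, mirroring ongoingAnnotations.push(value.annotation).
    ongoingAnnotations.push(chunk.annotation);
  } else {
    // On completion, the buffered annotations belong to this message.
    console.log(chunk.itemId, ongoingAnnotations);
  }
}

// Example run with hypothetical values.
handleChunk({ kind: "message-start", itemId: "msg_1" });
handleChunk({ kind: "annotation", annotation: { type: "url_citation", url: "https://example.com" } });
handleChunk({ kind: "message-done", itemId: "msg_1" });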
@@ -4423,6 +4421,19 @@ var OpenAIResponsesLanguageModel = class {
               } : {}
             });
           }
+        } else if (isResponseOutputItemDoneChunk(value) && value.item.type === "message") {
+          controller.enqueue({
+            type: "text-end",
+            id: value.item.id,
+            providerMetadata: {
+              openai: {
+                itemId: value.item.id,
+                ...ongoingAnnotations.length > 0 && {
+                  annotations: ongoingAnnotations
+                }
+              }
+            }
+          });
         } else if (isErrorChunk(value)) {
           controller.enqueue({ type: "error", error: value });
         }
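Net effect of the final hunk: when a message output item completes, the buffered annotations are attached to the text-end part under providerMetadata.openai.annotations, and the key is omitted entirely when no annotation chunks arrived. A minimal TypeScript sketch of the resulting part shape, assuming a hypothetical item id and annotation payload:

// Sketch of the providerMetadata shape produced by the new text-end branch.
// The item id and annotation payload below are hypothetical examples.
type Annotation = { type: string; [key: string]: unknown };

const ongoingAnnotations: Annotation[] = [
  { type: "url_citation", url: "https://example.com", title: "Example source" },
];

const itemId = "msg_abc123"; // hypothetical output item id

const textEndPart = {
  type: "text-end" as const,
  id: itemId,
  providerMetadata: {
    openai: {
      itemId,
      // Same conditional spread as the diff: spreading `false` is a no-op,
      // so the annotations key only appears when the buffer is non-empty.
      ...(ongoingAnnotations.length > 0 && { annotations: ongoingAnnotations }),
    },
  },
};

console.log(JSON.stringify(textEndPart, null, 2));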