@ai-sdk/openai 3.0.0-beta.62 → 3.0.0-beta.64
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +14 -0
- package/dist/index.js +18 -7
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +18 -7
- package/dist/index.mjs.map +1 -1
- package/dist/internal/index.js +17 -6
- package/dist/internal/index.js.map +1 -1
- package/dist/internal/index.mjs +17 -6
- package/dist/internal/index.mjs.map +1 -1
- package/package.json +3 -3
package/dist/index.mjs CHANGED

@@ -4084,6 +4084,7 @@ var OpenAIResponsesLanguageModel = class {
 const logprobs = [];
 let responseId = null;
 const ongoingToolCalls = {};
+const ongoingAnnotations = [];
 let hasFunctionCall = false;
 const activeReasoning = {};
 let serviceTier;
@@ -4192,6 +4193,7 @@ var OpenAIResponsesLanguageModel = class {
 providerExecuted: true
 });
 } else if (value.item.type === "message") {
+ongoingAnnotations.splice(0, ongoingAnnotations.length);
 controller.enqueue({
 type: "text-start",
 id: value.item.id,
@@ -4217,7 +4219,7 @@ var OpenAIResponsesLanguageModel = class {
 }
 });
 }
-} else if (isResponseOutputItemDoneChunk(value)) {
+} else if (isResponseOutputItemDoneChunk(value) && value.item.type !== "message") {
 if (value.item.type === "function_call") {
 ongoingToolCalls[value.output_index] = void 0;
 hasFunctionCall = true;
@@ -4372,11 +4374,6 @@ var OpenAIResponsesLanguageModel = class {
 openai: { itemId: value.item.id }
 }
 });
-} else if (value.item.type === "message") {
-controller.enqueue({
-type: "text-end",
-id: value.item.id
-});
 } else if (value.item.type === "reasoning") {
 const activeReasoningPart = activeReasoning[value.item.id];
 const summaryPartIndices = Object.entries(
@@ -4533,6 +4530,7 @@ var OpenAIResponsesLanguageModel = class {
 serviceTier = value.response.service_tier;
 }
 } else if (isResponseAnnotationAddedChunk(value)) {
+ongoingAnnotations.push(value.annotation);
 if (value.annotation.type === "url_citation") {
 controller.enqueue({
 type: "source",
@@ -4589,6 +4587,19 @@ var OpenAIResponsesLanguageModel = class {
 }
 });
 }
+} else if (isResponseOutputItemDoneChunk(value) && value.item.type === "message") {
+controller.enqueue({
+type: "text-end",
+id: value.item.id,
+providerMetadata: {
+openai: {
+itemId: value.item.id,
+...ongoingAnnotations.length > 0 && {
+annotations: ongoingAnnotations
+}
+}
+}
+});
 } else if (isErrorChunk(value)) {
 controller.enqueue({ type: "error", error: value });
 }
@@ -5047,7 +5058,7 @@ var OpenAITranscriptionModel = class {
 };

 // src/version.ts
-var VERSION = true ? "3.0.0-beta.62" : "0.0.0-test";
+var VERSION = true ? "3.0.0-beta.64" : "0.0.0-test";

 // src/openai-provider.ts
 function createOpenAI(options = {}) {