@ai-sdk/openai 2.0.0-beta.8 → 2.0.0-beta.9
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +6 -0
- package/dist/index.js +19 -7
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +19 -7
- package/dist/index.mjs.map +1 -1
- package/dist/internal/index.js +19 -7
- package/dist/internal/index.js.map +1 -1
- package/dist/internal/index.mjs +19 -7
- package/dist/internal/index.mjs.map +1 -1
- package/package.json +1 -1
package/dist/index.mjs (CHANGED)
@@ -1906,7 +1906,7 @@ async function convertToOpenAIResponsesMessages({
   prompt,
   systemMessageMode
 }) {
-  var _a, _b, _c, _d, _e;
+  var _a, _b, _c, _d, _e, _f, _g, _h;
   const messages = [];
   const warnings = [];
   for (const { role, content } of prompt) {
@@ -1984,7 +1984,8 @@ async function convertToOpenAIResponsesMessages({
         case "text": {
           messages.push({
             role: "assistant",
-            content: [{ type: "output_text", text: part.text }]
+            content: [{ type: "output_text", text: part.text }],
+            id: (_c = (_b = (_a = part.providerOptions) == null ? void 0 : _a.openai) == null ? void 0 : _b.itemId) != null ? _c : void 0
           });
           break;
         }
@@ -1997,7 +1998,7 @@ async function convertToOpenAIResponsesMessages({
           call_id: part.toolCallId,
           name: part.toolName,
           arguments: JSON.stringify(part.input),
-          id: (
+          id: (_f = (_e = (_d = part.providerOptions) == null ? void 0 : _d.openai) == null ? void 0 : _e.itemId) != null ? _f : void 0
         });
         break;
       }
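Un-minified, the two additions above read `providerOptions.openai.itemId` from the prompt part and forward it as the Responses API item `id`. Below is a hedged sketch with simplified, assumed types (`toResponsesItems` is not an SDK export); the dist code in the diff is the authoritative form.

```ts
// Sketch only: simplified shapes for assistant text parts and tool-call parts.
type OpenAIItemProviderOptions = { openai?: { itemId?: string } };

type AssistantTextPart = {
  type: "text";
  text: string;
  providerOptions?: OpenAIItemProviderOptions;
};

type ToolCallPart = {
  type: "tool-call";
  toolCallId: string;
  toolName: string;
  input: unknown;
  providerOptions?: OpenAIItemProviderOptions;
};

function toResponsesItems(parts: Array<AssistantTextPart | ToolCallPart>) {
  // Mirrors the bundle's `!= null ? _c : void 0` fallback.
  const itemId = (options?: OpenAIItemProviderOptions) =>
    options?.openai?.itemId ?? undefined;

  return parts.map((part) =>
    part.type === "text"
      ? {
          role: "assistant" as const,
          content: [{ type: "output_text" as const, text: part.text }],
          id: itemId(part.providerOptions), // new in beta.9
        }
      : {
          type: "function_call" as const,
          call_id: part.toolCallId,
          name: part.toolName,
          arguments: JSON.stringify(part.input),
          id: itemId(part.providerOptions), // new in beta.9
        }
  );
}
```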
@@ -2014,7 +2015,7 @@ async function convertToOpenAIResponsesMessages({
           providerOptions: part.providerOptions,
           schema: openaiResponsesReasoningProviderOptionsSchema
         });
-        const reasoningId = (
+        const reasoningId = (_g = providerOptions == null ? void 0 : providerOptions.reasoning) == null ? void 0 : _g.id;
         if (reasoningId != null) {
           const existingReasoningMessage = reasoningMessages[reasoningId];
           const summaryParts = [];
@@ -2030,7 +2031,7 @@ async function convertToOpenAIResponsesMessages({
           reasoningMessages[reasoningId] = {
             type: "reasoning",
             id: reasoningId,
-            encrypted_content: (
+            encrypted_content: (_h = providerOptions == null ? void 0 : providerOptions.reasoning) == null ? void 0 : _h.encryptedContent,
             summary: summaryParts
           };
           messages.push(reasoningMessages[reasoningId]);
@@ -2377,6 +2378,7 @@ var OpenAIResponsesLanguageModel = class {
       z14.object({
         type: z14.literal("message"),
         role: z14.literal("assistant"),
+        id: z14.string(),
         content: z14.array(
           z14.object({
             type: z14.literal("output_text"),
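The response schema for assistant "message" output items now also parses the item `id`, which is what the provider surfaces later as `openai.itemId` metadata. A rough sketch using plain zod (the bundle aliases its zod import as `z14`); the `text` field and the omitted annotation fields are assumptions based on the surrounding, unchanged schema.

```ts
import { z } from "zod";

// Sketch of the expanded output-item schema, not the SDK's exact definition.
const assistantMessageItemSchema = z.object({
  type: z.literal("message"),
  role: z.literal("assistant"),
  id: z.string(), // new in beta.9
  content: z.array(
    z.object({
      type: z.literal("output_text"),
      text: z.string(),
      // annotations etc. omitted in this sketch
    })
  ),
});

type AssistantMessageItem = z.infer<typeof assistantMessageItemSchema>;
```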
@@ -2468,7 +2470,12 @@ var OpenAIResponsesLanguageModel = class {
       for (const contentPart of part.content) {
         content.push({
           type: "text",
-          text: contentPart.text
+          text: contentPart.text,
+          providerMetadata: {
+            openai: {
+              itemId: part.id
+            }
+          }
         });
         for (const annotation of contentPart.annotations) {
           content.push({
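On the response path, each generated text part is now tagged with `providerMetadata.openai.itemId` taken from the parsed item's `id`. A hedged usage sketch with `generateText`; whether the core `ai` package passes part-level provider metadata through to `result.content` depends on its version, so treat that access as an assumption rather than documented API.

```ts
import { openai } from "@ai-sdk/openai";
import { generateText } from "ai";

async function main() {
  const result = await generateText({
    // `openai.responses(...)` selects the Responses API model touched by this diff.
    model: openai.responses("gpt-4o"),
    prompt: "Hello!",
  });

  for (const part of result.content) {
    if (part.type === "text") {
      // Assumption: part-level providerMetadata is forwarded by the core package.
      console.log(part.text, part.providerMetadata?.openai?.itemId);
    }
  }
}

main().catch(console.error);
```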
@@ -2645,7 +2652,12 @@ var OpenAIResponsesLanguageModel = class {
       } else if (value.item.type === "message") {
         controller.enqueue({
           type: "text-start",
-          id: value.item.id
+          id: value.item.id,
+          providerMetadata: {
+            openai: {
+              itemId: value.item.id
+            }
+          }
         });
       } else if (isResponseOutputItemAddedReasoningChunk(value)) {
         activeReasoning[value.item.id] = {
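The streaming path mirrors the non-streaming change: `text-start` stream parts now carry the same `itemId`. A hedged sketch of reading it from `streamText`'s `fullStream`; how much part-level metadata the core package forwards is an assumption, not something this diff shows.

```ts
import { openai } from "@ai-sdk/openai";
import { streamText } from "ai";

async function main() {
  const result = streamText({
    model: openai.responses("gpt-4o"),
    prompt: "Hello!",
  });

  for await (const part of result.fullStream) {
    if (part.type === "text-start") {
      // Assumption: the `text-start` part exposes the provider metadata
      // attached by the provider code in the diff above.
      console.log("OpenAI item id:", part.providerMetadata?.openai?.itemId);
    }
  }
}

main().catch(console.error);
```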