@ai-sdk/openai 2.0.0-beta.8 → 2.0.0-beta.9
This diff shows the changes between package versions that have been publicly released to a supported registry. It is provided for informational purposes only and reflects the packages as they appear in their respective public registries.
- package/CHANGELOG.md +6 -0
- package/dist/index.js +19 -7
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +19 -7
- package/dist/index.mjs.map +1 -1
- package/dist/internal/index.js +19 -7
- package/dist/internal/index.js.map +1 -1
- package/dist/internal/index.mjs +19 -7
- package/dist/internal/index.mjs.map +1 -1
- package/package.json +1 -1
package/CHANGELOG.md
CHANGED
package/dist/index.js
CHANGED
```diff
@@ -1879,7 +1879,7 @@ async function convertToOpenAIResponsesMessages({
   prompt,
   systemMessageMode
 }) {
-  var _a, _b, _c, _d, _e;
+  var _a, _b, _c, _d, _e, _f, _g, _h;
   const messages = [];
   const warnings = [];
   for (const { role, content } of prompt) {
```
```diff
@@ -1957,7 +1957,8 @@ async function convertToOpenAIResponsesMessages({
         case "text": {
           messages.push({
             role: "assistant",
-            content: [{ type: "output_text", text: part.text }]
+            content: [{ type: "output_text", text: part.text }],
+            id: (_c = (_b = (_a = part.providerOptions) == null ? void 0 : _a.openai) == null ? void 0 : _b.itemId) != null ? _c : void 0
           });
           break;
         }
@@ -1970,7 +1971,7 @@ async function convertToOpenAIResponsesMessages({
             call_id: part.toolCallId,
             name: part.toolName,
             arguments: JSON.stringify(part.input),
-            id: (
+            id: (_f = (_e = (_d = part.providerOptions) == null ? void 0 : _d.openai) == null ? void 0 : _e.itemId) != null ? _f : void 0
           });
           break;
         }
```
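Both hunks above do the same thing: when assistant content is converted back into Responses API input, the converter now looks for an OpenAI item id supplied under `providerOptions.openai.itemId` and forwards it as the `id` of the generated `output_text` message or `function_call` item. A minimal sketch of such prompt parts on the caller's side is below; only the `providerOptions.openai.itemId` key and the `toolCallId`/`toolName`/`input` field names are taken from the diff, all ids and values are made up.

```ts
// Hypothetical assistant history parts whose providerOptions carry the item ids
// returned by an earlier Responses API call. After this change the converter
// emits them as the `id` of the corresponding output_text / function_call items.
const assistantTextPart = {
  type: "text" as const,
  text: "Hello!",
  providerOptions: { openai: { itemId: "msg_123" } }, // made-up id
};

const assistantToolCallPart = {
  type: "tool-call" as const,
  toolCallId: "call_456", // made-up id
  toolName: "getWeather",
  input: { city: "Berlin" },
  providerOptions: { openai: { itemId: "fc_789" } }, // made-up id
};
```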
```diff
@@ -1987,7 +1988,7 @@ async function convertToOpenAIResponsesMessages({
             providerOptions: part.providerOptions,
             schema: openaiResponsesReasoningProviderOptionsSchema
           });
-          const reasoningId = (
+          const reasoningId = (_g = providerOptions == null ? void 0 : providerOptions.reasoning) == null ? void 0 : _g.id;
           if (reasoningId != null) {
             const existingReasoningMessage = reasoningMessages[reasoningId];
             const summaryParts = [];
@@ -2003,7 +2004,7 @@ async function convertToOpenAIResponsesMessages({
             reasoningMessages[reasoningId] = {
               type: "reasoning",
               id: reasoningId,
-              encrypted_content: (
+              encrypted_content: (_h = providerOptions == null ? void 0 : providerOptions.reasoning) == null ? void 0 : _h.encryptedContent,
               summary: summaryParts
             };
             messages.push(reasoningMessages[reasoningId]);
```
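The reasoning branch follows the same pattern: the provider options parsed with `openaiResponsesReasoningProviderOptionsSchema` are read through the new `_g`/`_h` temporaries to obtain `reasoning.id` and `reasoning.encryptedContent`. A sketch of the reasoning item the converter assembles for a new reasoning id (field names from the diff, values are placeholders):

```ts
// Shape of the `reasoning` item pushed into the Responses API input when a
// reasoning id is present. Values below are placeholders.
const reasoningItem = {
  type: "reasoning" as const,
  id: "rs_123",                 // providerOptions.reasoning.id
  encrypted_content: undefined, // providerOptions.reasoning.encryptedContent, if round-tripped
  summary: [],                  // summaryParts collected from the prompt
};
```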
```diff
@@ -2348,6 +2349,7 @@ var OpenAIResponsesLanguageModel = class {
       import_v414.z.object({
         type: import_v414.z.literal("message"),
         role: import_v414.z.literal("assistant"),
+        id: import_v414.z.string(),
         content: import_v414.z.array(
           import_v414.z.object({
             type: import_v414.z.literal("output_text"),
```
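The schema used to parse assistant message items from the API response gains a required `id`, so the item id is available to attach to the generated content. A simplified sketch of that schema after the change (the bundled build references zod v4; the real schema also covers annotations and related fields):

```ts
import { z } from "zod/v4";

// Simplified assistant-message output schema after beta.9;
// `id` is the newly required field.
const messageItemSchema = z.object({
  type: z.literal("message"),
  role: z.literal("assistant"),
  id: z.string(),
  content: z.array(
    z.object({
      type: z.literal("output_text"),
      text: z.string(),
    }),
  ),
});
```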
```diff
@@ -2439,7 +2441,12 @@ var OpenAIResponsesLanguageModel = class {
         for (const contentPart of part.content) {
           content.push({
             type: "text",
-            text: contentPart.text
+            text: contentPart.text,
+            providerMetadata: {
+              openai: {
+                itemId: part.id
+              }
+            }
           });
           for (const annotation of contentPart.annotations) {
             content.push({
```
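In the non-streaming generate path, each text part derived from an output message now carries the item id under `providerMetadata.openai.itemId`. A sketch of how a caller might read it back, assuming the AI SDK 5 beta `generateText` surface; model id and prompt are placeholders:

```ts
import { generateText } from "ai";
import { openai } from "@ai-sdk/openai";

const result = await generateText({
  model: openai.responses("gpt-4o-mini"), // placeholder model id
  prompt: "Say hello.",
});

for (const part of result.content) {
  if (part.type === "text") {
    // The itemId can be passed back via providerOptions on later requests.
    console.log(part.providerMetadata?.openai?.itemId);
  }
}
```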
```diff
@@ -2616,7 +2623,12 @@ var OpenAIResponsesLanguageModel = class {
         } else if (value.item.type === "message") {
           controller.enqueue({
             type: "text-start",
-            id: value.item.id
+            id: value.item.id,
+            providerMetadata: {
+              openai: {
+                itemId: value.item.id
+              }
+            }
           });
         } else if (isResponseOutputItemAddedReasoningChunk(value)) {
           activeReasoning[value.item.id] = {
```
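The streaming path mirrors this: the `text-start` part enqueued when a message output item is added now carries the same `providerMetadata.openai.itemId`. A sketch of reading it from the full stream, under the same assumptions about the AI SDK 5 beta surface as above:

```ts
import { streamText } from "ai";
import { openai } from "@ai-sdk/openai";

const result = streamText({
  model: openai.responses("gpt-4o-mini"), // placeholder model id
  prompt: "Say hello.",
});

for await (const part of result.fullStream) {
  if (part.type === "text-start") {
    console.log(part.providerMetadata?.openai?.itemId);
  }
}
```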