@ai-sdk/openai 2.0.0-beta.7 → 2.0.0-beta.9
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +14 -0
- package/dist/index.js +36 -12
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +36 -12
- package/dist/index.mjs.map +1 -1
- package/dist/internal/index.js +36 -12
- package/dist/internal/index.js.map +1 -1
- package/dist/internal/index.mjs +36 -12
- package/dist/internal/index.mjs.map +1 -1
- package/package.json +3 -3
package/dist/internal/index.mjs
CHANGED
@@ -2006,7 +2006,7 @@ async function convertToOpenAIResponsesMessages({
   prompt,
   systemMessageMode
 }) {
-  var _a, _b;
+  var _a, _b, _c, _d, _e, _f, _g, _h;
   const messages = [];
   const warnings = [];
   for (const { role, content } of prompt) {
@@ -2041,7 +2041,7 @@ async function convertToOpenAIResponsesMessages({
         messages.push({
           role: "user",
           content: content.map((part, index) => {
-            var _a2, _b2;
+            var _a2, _b2, _c2;
             switch (part.type) {
               case "text": {
                 return { type: "input_text", text: part.text };
@@ -2063,7 +2063,7 @@ async function convertToOpenAIResponsesMessages({
                   }
                   return {
                     type: "input_file",
-                    filename: (
+                    filename: (_c2 = part.filename) != null ? _c2 : `part-${index}.pdf`,
                     file_data: `data:application/pdf;base64,${part.data}`
                   };
                 } else {
@@ -2084,7 +2084,8 @@ async function convertToOpenAIResponsesMessages({
           case "text": {
             messages.push({
               role: "assistant",
-              content: [{ type: "output_text", text: part.text }]
+              content: [{ type: "output_text", text: part.text }],
+              id: (_c = (_b = (_a = part.providerOptions) == null ? void 0 : _a.openai) == null ? void 0 : _b.itemId) != null ? _c : void 0
             });
             break;
           }
@@ -2096,7 +2097,8 @@ async function convertToOpenAIResponsesMessages({
             type: "function_call",
             call_id: part.toolCallId,
             name: part.toolName,
-            arguments: JSON.stringify(part.input)
+            arguments: JSON.stringify(part.input),
+            id: (_f = (_e = (_d = part.providerOptions) == null ? void 0 : _d.openai) == null ? void 0 : _e.itemId) != null ? _f : void 0
           });
           break;
         }
@@ -2113,7 +2115,7 @@ async function convertToOpenAIResponsesMessages({
              providerOptions: part.providerOptions,
              schema: openaiResponsesReasoningProviderOptionsSchema
            });
-            const reasoningId = (_a = providerOptions == null ? void 0 : providerOptions.reasoning) == null ? void 0 : _a.id;
+            const reasoningId = (_g = providerOptions == null ? void 0 : providerOptions.reasoning) == null ? void 0 : _g.id;
            if (reasoningId != null) {
              const existingReasoningMessage = reasoningMessages[reasoningId];
              const summaryParts = [];
@@ -2129,7 +2131,7 @@ async function convertToOpenAIResponsesMessages({
               reasoningMessages[reasoningId] = {
                 type: "reasoning",
                 id: reasoningId,
-                encrypted_content: (_b = providerOptions == null ? void 0 : providerOptions.reasoning) == null ? void 0 : _b.encryptedContent,
+                encrypted_content: (_h = providerOptions == null ? void 0 : providerOptions.reasoning) == null ? void 0 : _h.encryptedContent,
                 summary: summaryParts
               };
               messages.push(reasoningMessages[reasoningId]);
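The hunks above extend `convertToOpenAIResponsesMessages` so that assistant text and tool-call parts can carry the originating OpenAI Responses item id via `providerOptions.openai.itemId`, which is then emitted as the `id` field of the corresponding `message` / `function_call` items. A minimal sketch of the prompt shape the converter now reads (TypeScript; the id values are illustrative placeholders, not taken from this diff):

```ts
// Sketch: assistant message parts whose provider options carry an OpenAI
// Responses item id. convertToOpenAIResponsesMessages reads
// part.providerOptions?.openai?.itemId and forwards it as the item `id`.
const assistantMessage = {
  role: "assistant" as const,
  content: [
    {
      type: "text" as const,
      text: "The weather in Berlin is sunny.",
      providerOptions: { openai: { itemId: "msg_abc" } }, // placeholder id
    },
    {
      type: "tool-call" as const,
      toolCallId: "call_1",
      toolName: "getWeather",
      input: { city: "Berlin" },
      providerOptions: { openai: { itemId: "fc_def" } }, // placeholder id
    },
  ],
};
```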
@@ -2476,6 +2478,7 @@ var OpenAIResponsesLanguageModel = class {
           z15.object({
             type: z15.literal("message"),
             role: z15.literal("assistant"),
+            id: z15.string(),
             content: z15.array(
               z15.object({
                 type: z15.literal("output_text"),
@@ -2496,7 +2499,8 @@ var OpenAIResponsesLanguageModel = class {
             type: z15.literal("function_call"),
             call_id: z15.string(),
             name: z15.string(),
-            arguments: z15.string()
+            arguments: z15.string(),
+            id: z15.string()
           }),
           z15.object({
             type: z15.literal("web_search_call"),
@@ -2566,7 +2570,12 @@ var OpenAIResponsesLanguageModel = class {
         for (const contentPart of part.content) {
           content.push({
             type: "text",
-            text: contentPart.text
+            text: contentPart.text,
+            providerMetadata: {
+              openai: {
+                itemId: part.id
+              }
+            }
           });
           for (const annotation of contentPart.annotations) {
             content.push({
@@ -2585,7 +2594,12 @@ var OpenAIResponsesLanguageModel = class {
           type: "tool-call",
           toolCallId: part.call_id,
           toolName: part.name,
-          input: part.arguments
+          input: part.arguments,
+          providerMetadata: {
+            openai: {
+              itemId: part.id
+            }
+          }
         });
         break;
       }
@@ -2738,7 +2752,12 @@ var OpenAIResponsesLanguageModel = class {
         } else if (value.item.type === "message") {
           controller.enqueue({
             type: "text-start",
-            id: value.item.id
+            id: value.item.id,
+            providerMetadata: {
+              openai: {
+                itemId: value.item.id
+              }
+            }
           });
         } else if (isResponseOutputItemAddedReasoningChunk(value)) {
           activeReasoning[value.item.id] = {
@@ -2770,7 +2789,12 @@ var OpenAIResponsesLanguageModel = class {
             type: "tool-call",
             toolCallId: value.item.call_id,
             toolName: value.item.name,
-            input: value.item.arguments
+            input: value.item.arguments,
+            providerMetadata: {
+              openai: {
+                itemId: value.item.id
+              }
+            }
           });
         } else if (value.item.type === "web_search_call") {
           ongoingToolCalls[value.output_index] = void 0;