@ai-sdk/openai 2.0.29 → 2.0.30
This diff shows the changes between two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.
- package/CHANGELOG.md +7 -0
- package/dist/index.js +45 -44
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +17 -16
- package/dist/index.mjs.map +1 -1
- package/dist/internal/index.js +34 -33
- package/dist/internal/index.js.map +1 -1
- package/dist/internal/index.mjs +17 -16
- package/dist/internal/index.mjs.map +1 -1
- package/package.json +2 -2
package/dist/internal/index.js
CHANGED
@@ -2137,36 +2137,35 @@ var OpenAISpeechModel = class {
 
 // src/responses/openai-responses-language-model.ts
 var import_provider8 = require("@ai-sdk/provider");
-var
+var import_provider_utils14 = require("@ai-sdk/provider-utils");
 var import_v417 = require("zod/v4");
 
-// src/responses/convert-to-openai-responses-
+// src/responses/convert-to-openai-responses-input.ts
 var import_provider6 = require("@ai-sdk/provider");
 var import_provider_utils11 = require("@ai-sdk/provider-utils");
 var import_v414 = require("zod/v4");
-var import_provider_utils12 = require("@ai-sdk/provider-utils");
 function isFileId(data, prefixes) {
 if (!prefixes) return false;
 return prefixes.some((prefix) => data.startsWith(prefix));
 }
-async function
+async function convertToOpenAIResponsesInput({
 prompt,
 systemMessageMode,
 fileIdPrefixes
 }) {
 var _a, _b, _c, _d, _e, _f;
-const
+const input = [];
 const warnings = [];
 for (const { role, content } of prompt) {
 switch (role) {
 case "system": {
 switch (systemMessageMode) {
 case "system": {
-
+input.push({ role: "system", content });
 break;
 }
 case "developer": {
-
+input.push({ role: "developer", content });
 break;
 }
 case "remove": {
@@ -2186,7 +2185,7 @@ async function convertToOpenAIResponsesMessages({
 break;
 }
 case "user": {
-
+input.push({
 role: "user",
 content: content.map((part, index) => {
 var _a2, _b2, _c2;
@@ -2200,7 +2199,7 @@ async function convertToOpenAIResponsesMessages({
 return {
 type: "input_image",
 ...part.data instanceof URL ? { image_url: part.data.toString() } : typeof part.data === "string" && isFileId(part.data, fileIdPrefixes) ? { file_id: part.data } : {
-image_url: `data:${mediaType};base64,${(0,
+image_url: `data:${mediaType};base64,${(0, import_provider_utils11.convertToBase64)(part.data)}`
 },
 detail: (_b2 = (_a2 = part.providerOptions) == null ? void 0 : _a2.openai) == null ? void 0 : _b2.imageDetail
 };
@@ -2215,7 +2214,7 @@ async function convertToOpenAIResponsesMessages({
 type: "input_file",
 ...typeof part.data === "string" && isFileId(part.data, fileIdPrefixes) ? { file_id: part.data } : {
 filename: (_c2 = part.filename) != null ? _c2 : `part-${index}.pdf`,
-file_data: `data:application/pdf;base64,${(0,
+file_data: `data:application/pdf;base64,${(0, import_provider_utils11.convertToBase64)(part.data)}`
 }
 };
 } else {
@@ -2231,10 +2230,11 @@ async function convertToOpenAIResponsesMessages({
 }
 case "assistant": {
 const reasoningMessages = {};
+const toolCallParts = {};
 for (const part of content) {
 switch (part.type) {
 case "text": {
-
+input.push({
 role: "assistant",
 content: [{ type: "output_text", text: part.text }],
 id: (_c = (_b = (_a = part.providerOptions) == null ? void 0 : _a.openai) == null ? void 0 : _b.itemId) != null ? _c : void 0
@@ -2242,10 +2242,11 @@ async function convertToOpenAIResponsesMessages({
 break;
 }
 case "tool-call": {
+toolCallParts[part.toolCallId] = part;
 if (part.providerExecuted) {
 break;
 }
-
+input.push({
 type: "function_call",
 call_id: part.toolCallId,
 name: part.toolName,
@@ -2286,7 +2287,7 @@ async function convertToOpenAIResponsesMessages({
 encrypted_content: providerOptions == null ? void 0 : providerOptions.reasoningEncryptedContent,
 summary: summaryParts
 };
-
+input.push(reasoningMessages[reasoningId]);
 } else {
 existingReasoningMessage.summary.push(...summaryParts);
 }
@@ -2317,7 +2318,7 @@ async function convertToOpenAIResponsesMessages({
 contentValue = JSON.stringify(output.value);
 break;
 }
-
+input.push({
 type: "function_call_output",
 call_id: part.toolCallId,
 output: contentValue
@@ -2331,7 +2332,7 @@ async function convertToOpenAIResponsesMessages({
 }
 }
 }
-return {
+return { input, warnings };
 }
 var openaiResponsesReasoningProviderOptionsSchema = import_v414.z.object({
 itemId: import_v414.z.string().nullish(),
@@ -2360,7 +2361,7 @@ function mapOpenAIResponseFinishReason({
 var import_provider7 = require("@ai-sdk/provider");
 
 // src/tool/code-interpreter.ts
-var
+var import_provider_utils12 = require("@ai-sdk/provider-utils");
 var import_v415 = require("zod/v4");
 var codeInterpreterInputSchema = import_v415.z.object({
 code: import_v415.z.string().nullish(),
@@ -2382,7 +2383,7 @@ var codeInterpreterArgsSchema = import_v415.z.object({
 })
 ]).optional()
 });
-var codeInterpreterToolFactory = (0,
+var codeInterpreterToolFactory = (0, import_provider_utils12.createProviderDefinedToolFactoryWithOutputSchema)({
 id: "openai.code_interpreter",
 name: "code_interpreter",
 inputSchema: codeInterpreterInputSchema,
@@ -2390,7 +2391,7 @@ var codeInterpreterToolFactory = (0, import_provider_utils13.createProviderDefin
 });
 
 // src/tool/web-search.ts
-var
+var import_provider_utils13 = require("@ai-sdk/provider-utils");
 var import_v416 = require("zod/v4");
 var webSearchArgsSchema = import_v416.z.object({
 filters: import_v416.z.object({
@@ -2405,7 +2406,7 @@ var webSearchArgsSchema = import_v416.z.object({
 timezone: import_v416.z.string().optional()
 }).optional()
 });
-var webSearchToolFactory = (0,
+var webSearchToolFactory = (0, import_provider_utils13.createProviderDefinedToolFactory)({
 id: "openai.web_search",
 name: "web_search",
 inputSchema: import_v416.z.object({
@@ -2624,13 +2625,13 @@ var OpenAIResponsesLanguageModel = class {
 if (stopSequences != null) {
 warnings.push({ type: "unsupported-setting", setting: "stopSequences" });
 }
-const {
+const { input, warnings: inputWarnings } = await convertToOpenAIResponsesInput({
 prompt,
 systemMessageMode: modelConfig.systemMessageMode,
 fileIdPrefixes: this.config.fileIdPrefixes
 });
-warnings.push(...
-const openaiOptions = await (0,
+warnings.push(...inputWarnings);
+const openaiOptions = await (0, import_provider_utils14.parseProviderOptions)({
 provider: "openai",
 providerOptions,
 schema: openaiResponsesProviderOptionsSchema
@@ -2649,7 +2650,7 @@ var OpenAIResponsesLanguageModel = class {
 include = codeInterpreterToolName ? Array.isArray(include) ? [...include, "code_interpreter_call.outputs"] : ["code_interpreter_call.outputs"] : include;
 const baseArgs = {
 model: this.modelId,
-input
+input,
 temperature,
 top_p: topP,
 max_output_tokens: maxOutputTokens,
@@ -2779,12 +2780,12 @@ var OpenAIResponsesLanguageModel = class {
 responseHeaders,
 value: response,
 rawValue: rawResponse
-} = await (0,
+} = await (0, import_provider_utils14.postJsonToApi)({
 url,
-headers: (0,
+headers: (0, import_provider_utils14.combineHeaders)(this.config.headers(), options.headers),
 body,
 failedResponseHandler: openaiFailedResponseHandler,
-successfulResponseHandler: (0,
+successfulResponseHandler: (0, import_provider_utils14.createJsonResponseHandler)(
 import_v417.z.object({
 id: import_v417.z.string(),
 created_at: import_v417.z.number(),
@@ -2934,7 +2935,7 @@ var OpenAIResponsesLanguageModel = class {
 content.push({
 type: "source",
 sourceType: "url",
-id: (_f = (_e = (_d = this.config).generateId) == null ? void 0 : _e.call(_d)) != null ? _f : (0,
+id: (_f = (_e = (_d = this.config).generateId) == null ? void 0 : _e.call(_d)) != null ? _f : (0, import_provider_utils14.generateId)(),
 url: annotation.url,
 title: annotation.title
 });
@@ -2942,7 +2943,7 @@ var OpenAIResponsesLanguageModel = class {
 content.push({
 type: "source",
 sourceType: "document",
-id: (_i = (_h = (_g = this.config).generateId) == null ? void 0 : _h.call(_g)) != null ? _i : (0,
+id: (_i = (_h = (_g = this.config).generateId) == null ? void 0 : _h.call(_g)) != null ? _i : (0, import_provider_utils14.generateId)(),
 mediaType: "text/plain",
 title: (_k = (_j = annotation.quote) != null ? _j : annotation.filename) != null ? _k : "Document",
 filename: (_l = annotation.filename) != null ? _l : annotation.file_id
@@ -3090,18 +3091,18 @@ var OpenAIResponsesLanguageModel = class {
 warnings,
 webSearchToolName
 } = await this.getArgs(options);
-const { responseHeaders, value: response } = await (0,
+const { responseHeaders, value: response } = await (0, import_provider_utils14.postJsonToApi)({
 url: this.config.url({
 path: "/responses",
 modelId: this.modelId
 }),
-headers: (0,
+headers: (0, import_provider_utils14.combineHeaders)(this.config.headers(), options.headers),
 body: {
 ...body,
 stream: true
 },
 failedResponseHandler: openaiFailedResponseHandler,
-successfulResponseHandler: (0,
+successfulResponseHandler: (0, import_provider_utils14.createEventSourceResponseHandler)(
 openaiResponsesChunkSchema
 ),
 abortSignal: options.abortSignal,
@@ -3403,7 +3404,7 @@ var OpenAIResponsesLanguageModel = class {
 controller.enqueue({
 type: "source",
 sourceType: "url",
-id: (_o = (_n = (_m = self.config).generateId) == null ? void 0 : _n.call(_m)) != null ? _o : (0,
+id: (_o = (_n = (_m = self.config).generateId) == null ? void 0 : _n.call(_m)) != null ? _o : (0, import_provider_utils14.generateId)(),
 url: value.annotation.url,
 title: value.annotation.title
 });
@@ -3411,7 +3412,7 @@ var OpenAIResponsesLanguageModel = class {
 controller.enqueue({
 type: "source",
 sourceType: "document",
-id: (_r = (_q = (_p = self.config).generateId) == null ? void 0 : _q.call(_p)) != null ? _r : (0,
+id: (_r = (_q = (_p = self.config).generateId) == null ? void 0 : _q.call(_p)) != null ? _r : (0, import_provider_utils14.generateId)(),
 mediaType: "text/plain",
 title: (_t = (_s = value.annotation.quote) != null ? _s : value.annotation.filename) != null ? _t : "Document",
 filename: (_u = value.annotation.filename) != null ? _u : value.annotation.file_id
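
The substantive change in this release is the rename of the internal Responses prompt converter: convertToOpenAIResponsesMessages becomes convertToOpenAIResponsesInput, it now returns { input, warnings } (consumed in the model class as { input, warnings: inputWarnings }), and the assistant branch additionally records tool-call parts in a toolCallParts map. The remaining hunks are the resulting renumbering of the bundled @ai-sdk/provider-utils requires. The TypeScript sketch below is illustrative only, not the package's implementation: it re-creates just the fragments that are fully visible in the diff (the isFileId prefix check and the system-message handling) to show the new { input, warnings } shape. The simplified types, the "file-" example prefix, and the empty "remove" branch are assumptions added here.

// Sketch only -- a stand-in that mirrors fragments of the renamed internal
// convertToOpenAIResponsesInput helper as they appear in the diff above.
type SystemMessageMode = "system" | "developer" | "remove";

interface SimplifiedMessage {
  role: "system" | "user" | "assistant" | "tool";
  content: string;
}

interface ConvertResult {
  // items passed through as the Responses API `input` array
  input: Array<{ role: string; content: string }>;
  warnings: Array<{ type: string; message?: string }>;
}

// Same logic as the isFileId context lines in the diff: a string is treated as
// an OpenAI file id when it starts with one of the configured prefixes.
function isFileId(data: string, prefixes?: readonly string[]): boolean {
  if (!prefixes) return false;
  return prefixes.some((prefix) => data.startsWith(prefix));
}

// Stand-in for the renamed helper; only system messages are handled here.
function convertSystemMessages(
  prompt: SimplifiedMessage[],
  systemMessageMode: SystemMessageMode,
): ConvertResult {
  const input: ConvertResult["input"] = [];
  const warnings: ConvertResult["warnings"] = [];
  for (const { role, content } of prompt) {
    if (role !== "system") continue;
    switch (systemMessageMode) {
      case "system":
        input.push({ role: "system", content });
        break;
      case "developer":
        input.push({ role: "developer", content });
        break;
      case "remove":
        // The diff cuts off inside this branch, so it is left as a no-op here.
        break;
    }
  }
  // 2.0.30 returns { input, warnings }; the call site destructures it as
  // { input, warnings: inputWarnings } and puts `input` into the request body.
  return { input, warnings };
}

// Usage of the stand-in, mirroring the call shape in the model class.
const { input, warnings } = convertSystemMessages(
  [{ role: "system", content: "You are a helpful assistant." }],
  "developer",
);
console.log(isFileId("file-abc123", ["file-"]), input, warnings);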