@ai-sdk/openai 2.1.0-beta.8 → 3.0.0-beta.17
This diff compares the contents of publicly released package versions as they appear in their respective public registries. It is provided for informational purposes only.
- package/CHANGELOG.md +73 -0
- package/dist/index.d.mts +16 -10
- package/dist/index.d.ts +16 -10
- package/dist/index.js +67 -20
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +68 -20
- package/dist/index.mjs.map +1 -1
- package/dist/internal/index.d.mts +115 -11
- package/dist/internal/index.d.ts +115 -11
- package/dist/internal/index.js +59 -13
- package/dist/internal/index.js.map +1 -1
- package/dist/internal/index.mjs +56 -13
- package/dist/internal/index.mjs.map +1 -1
- package/package.json +4 -4
package/dist/index.mjs (CHANGED)
```diff
@@ -1,6 +1,7 @@
 // src/openai-provider.ts
 import {
 loadApiKey,
+loadOptionalSetting,
 withoutTrailingSlash,
 withUserAgentSuffix
 } from "@ai-sdk/provider-utils";
@@ -48,6 +49,7 @@ function convertToOpenAIChatMessages({
 prompt,
 systemMessageMode = "system"
 }) {
+var _a;
 const messages = [];
 const warnings = [];
 for (const { role, content } of prompt) {
@@ -86,7 +88,7 @@ function convertToOpenAIChatMessages({
 messages.push({
 role: "user",
 content: content.map((part, index) => {
-var
+var _a2, _b, _c;
 switch (part.type) {
 case "text": {
 return { type: "text", text: part.text };
@@ -99,7 +101,7 @@ function convertToOpenAIChatMessages({
 image_url: {
 url: part.data instanceof URL ? part.data.toString() : `data:${mediaType};base64,${convertToBase64(part.data)}`,
 // OpenAI specific extension: image detail
-detail: (_b = (
+detail: (_b = (_a2 = part.providerOptions) == null ? void 0 : _a2.openai) == null ? void 0 : _b.imageDetail
 }
 };
 } else if (part.mediaType.startsWith("audio/")) {
@@ -196,6 +198,9 @@ function convertToOpenAIChatMessages({
 case "error-text":
 contentValue = output.value;
 break;
+case "execution-denied":
+contentValue = (_a = output.reason) != null ? _a : "Tool execution denied.";
+break;
 case "content":
 case "json":
 case "error-json":
```
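Both message converters now map the new `execution-denied` tool output onto the tool message content, falling back to "Tool execution denied." when no reason is given (the Responses converter gains the same branch further down). A minimal sketch of the tool-result part this branch handles; only `output.type` and `output.reason` come from this diff, and the ids and tool name are hypothetical:

```ts
// Hypothetical tool-result part inside a prompt message. Per this diff,
// convertToOpenAIChatMessages sends output.reason (or the fallback string)
// as the tool message content when output.type is "execution-denied".
const deniedToolResult = {
  type: "tool-result",
  toolCallId: "call_abc123", // hypothetical call id
  toolName: "deleteFile",    // hypothetical tool name
  output: {
    type: "execution-denied",
    reason: "User rejected the file deletion.", // optional; falls back to "Tool execution denied."
  },
} as const;
```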
```diff
@@ -1600,9 +1605,13 @@ import { z as z8 } from "zod/v4";
 var modelMaxImagesPerCall = {
 "dall-e-3": 1,
 "dall-e-2": 10,
-"gpt-image-1": 10
+"gpt-image-1": 10,
+"gpt-image-1-mini": 10
 };
-var hasDefaultResponseFormat = /* @__PURE__ */ new Set([
+var hasDefaultResponseFormat = /* @__PURE__ */ new Set([
+"gpt-image-1",
+"gpt-image-1-mini"
+]);
 
 // src/image/openai-image-model.ts
 var OpenAIImageModel = class {
```
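`gpt-image-1-mini` is now registered alongside `gpt-image-1`: it allows up to 10 images per call and is treated as having a default response format, so no `response_format` parameter is sent. A usage sketch, assuming the `ai` package's `experimental_generateImage` helper and the provider's `openai.image()` factory:

```ts
import { openai } from "@ai-sdk/openai";
import { experimental_generateImage as generateImage } from "ai";

// gpt-image-1-mini now resolves maxImagesPerCall = 10 and, like gpt-image-1,
// omits response_format (see modelMaxImagesPerCall / hasDefaultResponseFormat above).
const { image } = await generateImage({
  model: openai.image("gpt-image-1-mini"),
  prompt: "A watercolor lighthouse at dusk", // example prompt
});
```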
```diff
@@ -1777,6 +1786,7 @@ var imageGenerationArgsSchema = z11.object({
 moderation: z11.enum(["auto"]).optional(),
 outputCompression: z11.number().int().min(0).max(100).optional(),
 outputFormat: z11.enum(["png", "jpeg", "webp"]).optional(),
+partialImages: z11.number().int().min(0).max(3).optional(),
 quality: z11.enum(["auto", "low", "medium", "high"]).optional(),
 size: z11.enum(["1024x1024", "1024x1536", "1536x1024", "auto"]).optional()
 }).strict();
@@ -1948,11 +1958,16 @@ var openaiTools = {
 *
 * Must have name `image_generation`.
 *
-* @param
-* @param
-* @param
-* @param
-* @param
+* @param background - Background type for the generated image. One of 'auto', 'opaque', or 'transparent'.
+* @param inputFidelity - Input fidelity for the generated image. One of 'low' or 'high'.
+* @param inputImageMask - Optional mask for inpainting. Contains fileId and/or imageUrl.
+* @param model - The image generation model to use. Default: gpt-image-1.
+* @param moderation - Moderation level for the generated image. Default: 'auto'.
+* @param outputCompression - Compression level for the output image (0-100).
+* @param outputFormat - The output format of the generated image. One of 'png', 'jpeg', or 'webp'.
+* @param partialImages - Number of partial images to generate in streaming mode (0-3).
+* @param quality - The quality of the generated image. One of 'auto', 'low', 'medium', or 'high'.
+* @param size - The size of the generated image. One of 'auto', '1024x1024', '1024x1536', or '1536x1024'.
 */
 imageGeneration,
 /**
```
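The `imageGeneration` tool schema gains `partialImages` (0-3), which `prepareResponsesTools` below forwards to the Responses API as `partial_images`. A hedged configuration sketch, assuming `streamText` from the `ai` package; the model id and prompt are examples only:

```ts
import { openai } from "@ai-sdk/openai";
import { streamText } from "ai";

const result = streamText({
  model: openai.responses("gpt-4.1"), // example model id
  tools: {
    // The tool key must be `image_generation` (see the JSDoc above).
    image_generation: openai.tools.imageGeneration({
      partialImages: 2, // request up to 2 preliminary frames while streaming
      size: "1024x1024",
      outputFormat: "png",
    }),
  },
  prompt: "Sketch a simple compass-rose logo",
});
```

Partial frames then surface as preliminary tool results; see the streaming hunk and the sketch that follows it.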
```diff
@@ -2020,7 +2035,7 @@ async function convertToOpenAIResponsesInput({
 store,
 hasLocalShellTool = false
 }) {
-var _a, _b, _c, _d, _e, _f, _g, _h, _i;
+var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j;
 const input = [];
 const warnings = [];
 for (const { role, content } of prompt) {
@@ -2223,6 +2238,9 @@ async function convertToOpenAIResponsesInput({
 case "error-text":
 contentValue = output.value;
 break;
+case "execution-denied":
+contentValue = (_j = output.reason) != null ? _j : "Tool execution denied.";
+break;
 case "content":
 case "json":
 case "error-json":
@@ -2354,11 +2372,12 @@ function prepareResponsesTools({
 image_url: args.inputImageMask.imageUrl
 } : void 0,
 model: args.model,
-size: args.size,
-quality: args.quality,
 moderation: args.moderation,
+partial_images: args.partialImages,
+quality: args.quality,
+output_compression: args.outputCompression,
 output_format: args.outputFormat,
-
+size: args.size
 });
 break;
 }
@@ -3094,7 +3113,8 @@ var OpenAIResponsesLanguageModel = class {
 controller.enqueue({
 type: "tool-input-start",
 id: value.item.id,
-toolName: webSearchToolName != null ? webSearchToolName : "web_search"
+toolName: webSearchToolName != null ? webSearchToolName : "web_search",
+providerExecuted: true
 });
 } else if (value.item.type === "computer_call") {
 ongoingToolCalls[value.output_index] = {
@@ -3104,7 +3124,8 @@ var OpenAIResponsesLanguageModel = class {
 controller.enqueue({
 type: "tool-input-start",
 id: value.item.id,
-toolName: "computer_use"
+toolName: "computer_use",
+providerExecuted: true
 });
 } else if (value.item.type === "code_interpreter_call") {
 ongoingToolCalls[value.output_index] = {
@@ -3117,7 +3138,8 @@ var OpenAIResponsesLanguageModel = class {
 controller.enqueue({
 type: "tool-input-start",
 id: value.item.id,
-toolName: "code_interpreter"
+toolName: "code_interpreter",
+providerExecuted: true
 });
 controller.enqueue({
 type: "tool-input-delta",
@@ -3317,6 +3339,17 @@ var OpenAIResponsesLanguageModel = class {
 delta: value.delta
 });
 }
+} else if (isResponseImageGenerationCallPartialImageChunk(value)) {
+controller.enqueue({
+type: "tool-result",
+toolCallId: value.item_id,
+toolName: "image_generation",
+result: {
+result: value.partial_image_b64
+},
+providerExecuted: true,
+preliminary: true
+});
 } else if (isResponseCodeInterpreterCallCodeDeltaChunk(value)) {
 const toolCall = ongoingToolCalls[value.output_index];
 if (toolCall != null) {
```
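The streaming handler above converts the new `response.image_generation_call.partial_image` chunk (schema in the next hunk) into a preliminary, provider-executed `tool-result` stream part. A data-shape sketch taken directly from this diff; the id and base64 payload are placeholders:

```ts
// Incoming Responses API event, as accepted by
// responseImageGenerationCallPartialImageSchema (next hunk):
const chunk = {
  type: "response.image_generation_call.partial_image",
  item_id: "ig_123",             // placeholder item id
  output_index: 0,
  partial_image_b64: "<base64>", // placeholder partial-image data
};

// Stream part enqueued by OpenAIResponsesLanguageModel for that event:
const part = {
  type: "tool-result",
  toolCallId: chunk.item_id,
  toolName: "image_generation",
  result: { result: chunk.partial_image_b64 },
  providerExecuted: true,
  preliminary: true, // marks a partial frame, not the final image
};
```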
```diff
@@ -3589,6 +3622,12 @@ var responseFunctionCallArgumentsDeltaSchema = z16.object({
 output_index: z16.number(),
 delta: z16.string()
 });
+var responseImageGenerationCallPartialImageSchema = z16.object({
+type: z16.literal("response.image_generation_call.partial_image"),
+item_id: z16.string(),
+output_index: z16.number(),
+partial_image_b64: z16.string()
+});
 var responseCodeInterpreterCallCodeDeltaSchema = z16.object({
 type: z16.literal("response.code_interpreter_call_code.delta"),
 item_id: z16.string(),
@@ -3638,6 +3677,7 @@ var openaiResponsesChunkSchema = z16.union([
 responseOutputItemAddedSchema,
 responseOutputItemDoneSchema,
 responseFunctionCallArgumentsDeltaSchema,
+responseImageGenerationCallPartialImageSchema,
 responseCodeInterpreterCallCodeDeltaSchema,
 responseCodeInterpreterCallCodeDoneSchema,
 responseAnnotationAddedSchema,
@@ -3665,6 +3705,9 @@ function isResponseCreatedChunk(chunk) {
 function isResponseFunctionCallArgumentsDeltaChunk(chunk) {
 return chunk.type === "response.function_call_arguments.delta";
 }
+function isResponseImageGenerationCallPartialImageChunk(chunk) {
+return chunk.type === "response.image_generation_call.partial_image";
+}
 function isResponseCodeInterpreterCallCodeDeltaChunk(chunk) {
 return chunk.type === "response.code_interpreter_call_code.delta";
 }
@@ -3781,7 +3824,7 @@ var OpenAISpeechModel = class {
 constructor(modelId, config) {
 this.modelId = modelId;
 this.config = config;
-this.specificationVersion = "
+this.specificationVersion = "v3";
 }
 get provider() {
 return this.config.provider;
@@ -3979,7 +4022,7 @@ var OpenAITranscriptionModel = class {
 constructor(modelId, config) {
 this.modelId = modelId;
 this.config = config;
-this.specificationVersion = "
+this.specificationVersion = "v3";
 }
 get provider() {
 return this.config.provider;
@@ -4109,12 +4152,17 @@ var openaiTranscriptionResponseSchema = z19.object({
 });
 
 // src/version.ts
-var VERSION = true ? "
+var VERSION = true ? "3.0.0-beta.17" : "0.0.0-test";
 
 // src/openai-provider.ts
 function createOpenAI(options = {}) {
 var _a, _b;
-const baseURL = (_a = withoutTrailingSlash(
+const baseURL = (_a = withoutTrailingSlash(
+loadOptionalSetting({
+settingValue: options.baseURL,
+environmentVariableName: "OPENAI_BASE_URL"
+})
+)) != null ? _a : "https://api.openai.com/v1";
 const providerName = (_b = options.name) != null ? _b : "openai";
 const getHeaders = () => withUserAgentSuffix(
 {
```