@ai-sdk/openai 3.0.0-beta.18 → 3.0.0-beta.20
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +12 -0
- package/dist/index.d.mts +2 -2
- package/dist/index.d.ts +2 -2
- package/dist/index.js +25 -5
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +25 -5
- package/dist/index.mjs.map +1 -1
- package/dist/internal/index.d.mts +1 -1
- package/dist/internal/index.d.ts +1 -1
- package/dist/internal/index.js +24 -4
- package/dist/internal/index.js.map +1 -1
- package/dist/internal/index.mjs +24 -4
- package/dist/internal/index.mjs.map +1 -1
- package/package.json +1 -1
package/CHANGELOG.md
CHANGED
@@ -1,5 +1,17 @@
 # @ai-sdk/openai
 
+## 3.0.0-beta.20
+
+### Patch Changes
+
+- 68c6187: feat(provider/openai): support file and image tool results
+
+## 3.0.0-beta.19
+
+### Patch Changes
+
+- 484aa93: Add 'default' as service tier
+
 ## 3.0.0-beta.18
 
 ### Patch Changes
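
The beta.20 entry ("support file and image tool results") surfaces in the Responses input conversion shown further down in dist/index.js. A minimal caller-side sketch of what the feature enables, assuming the AI SDK v5 `tool()` helper and its `toModelOutput` option; the screenshot helper, base64 placeholder, and model id are illustrative only, not part of this package:

```ts
import { generateText, tool } from 'ai';
import { openai } from '@ai-sdk/openai';
import { z } from 'zod';

// Placeholder: a real tool would capture an actual screenshot here.
async function captureScreenshotBase64(): Promise<string> {
  return Buffer.from('not a real image').toString('base64');
}

const screenshot = tool({
  description: 'Take a screenshot of the current page',
  inputSchema: z.object({}),
  execute: async () => ({ pngBase64: await captureScreenshotBase64() }),
  // Map the execution result to content parts; as of beta.20 the OpenAI provider
  // forwards 'media' parts as image/file inputs instead of JSON-stringifying them.
  toModelOutput: (result) => ({
    type: 'content',
    value: [{ type: 'media', data: result.pngBase64, mediaType: 'image/png' }],
  }),
});

const { text } = await generateText({
  model: openai('gpt-4o'),
  tools: { screenshot },
  prompt: 'Take a screenshot and describe what you see.',
});
```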
package/dist/index.d.mts
CHANGED
@@ -14,7 +14,7 @@ declare const openaiChatLanguageModelOptions: _ai_sdk_provider_utils.LazyValidat
     metadata?: Record<string, string> | undefined;
     prediction?: Record<string, any> | undefined;
     structuredOutputs?: boolean | undefined;
-    serviceTier?: "auto" | "flex" | "priority" | undefined;
+    serviceTier?: "default" | "auto" | "flex" | "priority" | undefined;
     strictJsonSchema?: boolean | undefined;
     textVerbosity?: "low" | "medium" | "high" | undefined;
     promptCacheKey?: string | undefined;
@@ -263,7 +263,7 @@ declare const openaiResponsesProviderOptionsSchema: _ai_sdk_provider_utils.LazyV
     reasoningEffort?: string | null | undefined;
     reasoningSummary?: string | null | undefined;
     safetyIdentifier?: string | null | undefined;
-    serviceTier?: "auto" | "flex" | "priority" | null | undefined;
+    serviceTier?: "default" | "auto" | "flex" | "priority" | null | undefined;
     store?: boolean | null | undefined;
     strictJsonSchema?: boolean | null | undefined;
     textVerbosity?: "low" | "medium" | "high" | null | undefined;
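
With the widened union, callers can pin the `default` tier explicitly instead of inheriting whatever the project is configured to use. A minimal sketch, assuming the AI SDK `generateText` call shape and an illustrative model id:

```ts
import { generateText } from 'ai';
import { openai } from '@ai-sdk/openai';

// Request standard pricing/performance explicitly rather than 'auto',
// which defers to the OpenAI project-level service tier setting.
const { text } = await generateText({
  model: openai('gpt-4o'),
  prompt: 'Summarize the latest changelog entries.',
  providerOptions: {
    openai: { serviceTier: 'default' },
  },
});
```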
package/dist/index.d.ts
CHANGED
@@ -14,7 +14,7 @@ declare const openaiChatLanguageModelOptions: _ai_sdk_provider_utils.LazyValidat
     metadata?: Record<string, string> | undefined;
     prediction?: Record<string, any> | undefined;
     structuredOutputs?: boolean | undefined;
-    serviceTier?: "auto" | "flex" | "priority" | undefined;
+    serviceTier?: "default" | "auto" | "flex" | "priority" | undefined;
     strictJsonSchema?: boolean | undefined;
     textVerbosity?: "low" | "medium" | "high" | undefined;
     promptCacheKey?: string | undefined;
@@ -263,7 +263,7 @@ declare const openaiResponsesProviderOptionsSchema: _ai_sdk_provider_utils.LazyV
     reasoningEffort?: string | null | undefined;
     reasoningSummary?: string | null | undefined;
     safetyIdentifier?: string | null | undefined;
-    serviceTier?: "auto" | "flex" | "priority" | null | undefined;
+    serviceTier?: "default" | "auto" | "flex" | "priority" | null | undefined;
     store?: boolean | null | undefined;
     strictJsonSchema?: boolean | null | undefined;
     textVerbosity?: "low" | "medium" | "high" | null | undefined;
package/dist/index.js
CHANGED
@@ -472,13 +472,15 @@ var openaiChatLanguageModelOptions = (0, import_provider_utils4.lazyValidator)(
     structuredOutputs: z3.boolean().optional(),
     /**
      * Service tier for the request.
-     * - 'auto': Default service tier
+     * - 'auto': Default service tier. The request will be processed with the service tier configured in the
+     *   Project settings. Unless otherwise configured, the Project will use 'default'.
      * - 'flex': 50% cheaper processing at the cost of increased latency. Only available for o3 and o4-mini models.
      * - 'priority': Higher-speed processing with predictably low latency at premium cost. Available for Enterprise customers.
+     * - 'default': The request will be processed with the standard pricing and performance for the selected model.
      *
      * @default 'auto'
      */
-    serviceTier: z3.enum(["auto", "flex", "priority"]).optional(),
+    serviceTier: z3.enum(["auto", "flex", "priority", "default"]).optional(),
     /**
      * Whether to use strict JSON schema validation.
      *
@@ -2350,11 +2352,29 @@ async function convertToOpenAIResponsesInput({
       case "execution-denied":
         contentValue = (_j = output.reason) != null ? _j : "Tool execution denied.";
         break;
-      case "content":
       case "json":
       case "error-json":
         contentValue = JSON.stringify(output.value);
         break;
+      case "content":
+        contentValue = output.value.map((item) => {
+          switch (item.type) {
+            case "text": {
+              return { type: "input_text", text: item.text };
+            }
+            case "media": {
+              return item.mediaType.startsWith("image/") ? {
+                type: "input_image",
+                image_url: `data:${item.mediaType};base64,${item.data}`
+              } : {
+                type: "input_file",
+                filename: "data",
+                file_data: `data:${item.mediaType};base64,${item.data}`
+              };
+            }
+          }
+        });
+        break;
     }
     input.push({
       type: "function_call_output",
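
Read on its own, the new `content` branch is a straightforward mapping from SDK tool-result content parts to Responses API input parts: text stays text, images become `input_image` data URLs, and any other media type is sent as an `input_file`. A type-annotated sketch of the same logic (the type names here are illustrative, not the package's internals):

```ts
type ToolResultContentItem =
  | { type: 'text'; text: string }
  | { type: 'media'; data: string; mediaType: string }; // data is base64-encoded

type ResponsesInputPart =
  | { type: 'input_text'; text: string }
  | { type: 'input_image'; image_url: string }
  | { type: 'input_file'; filename: string; file_data: string };

// Mirrors the switch added above.
function toResponsesInputParts(items: ToolResultContentItem[]): ResponsesInputPart[] {
  return items.map((item) =>
    item.type === 'text'
      ? { type: 'input_text', text: item.text }
      : item.mediaType.startsWith('image/')
        ? { type: 'input_image', image_url: `data:${item.mediaType};base64,${item.data}` }
        : { type: 'input_file', filename: 'data', file_data: `data:${item.mediaType};base64,${item.data}` },
  );
}
```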
@@ -2915,7 +2935,7 @@ var openaiResponsesProviderOptionsSchema = (0, import_provider_utils22.lazyValid
     reasoningEffort: z17.string().nullish(),
     reasoningSummary: z17.string().nullish(),
     safetyIdentifier: z17.string().nullish(),
-    serviceTier: z17.enum(["auto", "flex", "priority"]).nullish(),
+    serviceTier: z17.enum(["auto", "flex", "priority", "default"]).nullish(),
     store: z17.boolean().nullish(),
     strictJsonSchema: z17.boolean().nullish(),
     textVerbosity: z17.enum(["low", "medium", "high"]).nullish(),
@@ -4422,7 +4442,7 @@ var OpenAITranscriptionModel = class {
 };
 
 // src/version.ts
-var VERSION = true ? "3.0.0-beta.18" : "0.0.0-test";
+var VERSION = true ? "3.0.0-beta.20" : "0.0.0-test";
 
 // src/openai-provider.ts
 function createOpenAI(options = {}) {