@ai-sdk/openai 2.0.45 → 2.0.46

This diff shows the changes between publicly released package versions as they appear in their public registries. It is provided for informational purposes only.
package/CHANGELOG.md CHANGED
@@ -1,5 +1,11 @@
  # @ai-sdk/openai
 
+ ## 2.0.46
+
+ ### Patch Changes
+
+ - 66f69e7: Add 'default' as service tier
+
  ## 2.0.45
 
  ### Patch Changes
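
The changelog entry above is the entire release: the `serviceTier` provider option now also accepts 'default'. As a rough usage sketch (not part of this diff; the model id, prompt, and use of `generateText` from the `ai` package are assumptions for illustration), opting into the new tier on the Chat Completions path could look like this:

```ts
import { generateText } from 'ai';
import { openai } from '@ai-sdk/openai';

// Illustrative sketch: model id and prompt are placeholders, not from the diff.
const { text } = await generateText({
  model: openai.chat('gpt-4o'),
  prompt: 'Summarize the latest release notes.',
  providerOptions: {
    openai: {
      // New in 2.0.46: 'default' joins 'auto', 'flex', and 'priority'.
      serviceTier: 'default',
    },
  },
});

console.log(text);
```

Omitting `serviceTier` entirely keeps the documented default of 'auto'.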
package/dist/index.d.mts CHANGED
@@ -14,7 +14,7 @@ declare const openaiChatLanguageModelOptions: _ai_sdk_provider_utils.LazyValidat
  metadata?: Record<string, string> | undefined;
  prediction?: Record<string, any> | undefined;
  structuredOutputs?: boolean | undefined;
- serviceTier?: "auto" | "flex" | "priority" | undefined;
+ serviceTier?: "default" | "auto" | "flex" | "priority" | undefined;
  strictJsonSchema?: boolean | undefined;
  textVerbosity?: "low" | "medium" | "high" | undefined;
  promptCacheKey?: string | undefined;
@@ -257,7 +257,7 @@ declare const openaiResponsesProviderOptionsSchema: _ai_sdk_provider_utils.LazyV
  reasoningEffort?: string | null | undefined;
  reasoningSummary?: string | null | undefined;
  safetyIdentifier?: string | null | undefined;
- serviceTier?: "auto" | "flex" | "priority" | null | undefined;
+ serviceTier?: "default" | "auto" | "flex" | "priority" | null | undefined;
  store?: boolean | null | undefined;
  strictJsonSchema?: boolean | null | undefined;
  textVerbosity?: "low" | "medium" | "high" | null | undefined;
package/dist/index.d.ts CHANGED
@@ -14,7 +14,7 @@ declare const openaiChatLanguageModelOptions: _ai_sdk_provider_utils.LazyValidat
  metadata?: Record<string, string> | undefined;
  prediction?: Record<string, any> | undefined;
  structuredOutputs?: boolean | undefined;
- serviceTier?: "auto" | "flex" | "priority" | undefined;
+ serviceTier?: "default" | "auto" | "flex" | "priority" | undefined;
  strictJsonSchema?: boolean | undefined;
  textVerbosity?: "low" | "medium" | "high" | undefined;
  promptCacheKey?: string | undefined;
@@ -257,7 +257,7 @@ declare const openaiResponsesProviderOptionsSchema: _ai_sdk_provider_utils.LazyV
  reasoningEffort?: string | null | undefined;
  reasoningSummary?: string | null | undefined;
  safetyIdentifier?: string | null | undefined;
- serviceTier?: "auto" | "flex" | "priority" | null | undefined;
+ serviceTier?: "default" | "auto" | "flex" | "priority" | null | undefined;
  store?: boolean | null | undefined;
  strictJsonSchema?: boolean | null | undefined;
  textVerbosity?: "low" | "medium" | "high" | null | undefined;
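
The same union widening appears in both declaration outputs (`index.d.mts` and `index.d.ts`): once for the Chat Completions options and once for the Responses options, where the field is additionally nullable. A sketch along the same lines for the Responses path (again an assumption-laden illustration; `openai.responses`, the model id, and the prompt are chosen only for the example) might be:

```ts
import { generateText } from 'ai';
import { openai } from '@ai-sdk/openai';

// Illustrative sketch: not part of the diff; model id and prompt are placeholders.
const result = await generateText({
  model: openai.responses('gpt-4o-mini'),
  prompt: 'Draft a short status update.',
  providerOptions: {
    openai: {
      // Typed here as "default" | "auto" | "flex" | "priority" | null | undefined.
      serviceTier: 'default',
    },
  },
});
```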
package/dist/index.js CHANGED
@@ -468,13 +468,15 @@ var openaiChatLanguageModelOptions = (0, import_provider_utils4.lazyValidator)(
  structuredOutputs: z3.boolean().optional(),
  /**
  * Service tier for the request.
- * - 'auto': Default service tier
+ * - 'auto': Default service tier. The request will be processed with the service tier configured in the
+ * Project settings. Unless otherwise configured, the Project will use 'default'.
  * - 'flex': 50% cheaper processing at the cost of increased latency. Only available for o3 and o4-mini models.
  * - 'priority': Higher-speed processing with predictably low latency at premium cost. Available for Enterprise customers.
+ * - 'default': The request will be processed with the standard pricing and performance for the selected model.
  *
  * @default 'auto'
  */
- serviceTier: z3.enum(["auto", "flex", "priority"]).optional(),
+ serviceTier: z3.enum(["auto", "flex", "priority", "default"]).optional(),
  /**
  * Whether to use strict JSON schema validation.
  *
@@ -2903,7 +2905,7 @@ var openaiResponsesProviderOptionsSchema = (0, import_provider_utils22.lazyValid
  reasoningEffort: z17.string().nullish(),
  reasoningSummary: z17.string().nullish(),
  safetyIdentifier: z17.string().nullish(),
- serviceTier: z17.enum(["auto", "flex", "priority"]).nullish(),
+ serviceTier: z17.enum(["auto", "flex", "priority", "default"]).nullish(),
  store: z17.boolean().nullish(),
  strictJsonSchema: z17.boolean().nullish(),
  textVerbosity: z17.enum(["low", "medium", "high"]).nullish(),
@@ -4395,7 +4397,7 @@ var OpenAITranscriptionModel = class {
  };
 
  // src/version.ts
- var VERSION = true ? "2.0.45" : "0.0.0-test";
+ var VERSION = true ? "2.0.46" : "0.0.0-test";
 
  // src/openai-provider.ts
  function createOpenAI(options = {}) {
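
Beyond the type, schema, and documentation changes, the only remaining change is the bump of the embedded VERSION string from "2.0.45" to "2.0.46". To make the schema change concrete, a minimal standalone sketch that mirrors the widened enum (a locally defined schema for illustration; the package does not export it under this name) behaves as follows:

```ts
import { z } from 'zod';

// Local sketch mirroring the updated enum; not the package's exported schema.
const serviceTier = z.enum(['auto', 'flex', 'priority', 'default']).optional();

serviceTier.parse('default');  // accepted as of 2.0.46
serviceTier.parse(undefined);  // still allowed; the documented default is 'auto'
// serviceTier.parse('batch'); // would throw: not a recognized tier
```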