@ai-sdk/openai 2.0.22 → 2.0.24

@@ -1930,7 +1930,12 @@ var OpenAITranscriptionModel = class {
  const formData = new FormData();
  const blob = audio instanceof Uint8Array ? new Blob([audio]) : new Blob([(0, import_provider_utils9.convertBase64ToUint8Array)(audio)]);
  formData.append("model", this.modelId);
- formData.append("file", new File([blob], "audio", { type: mediaType }));
+ const fileExtension = (0, import_provider_utils9.mediaTypeToExtension)(mediaType);
+ formData.append(
+   "file",
+   new File([blob], "audio", { type: mediaType }),
+   `audio.${fileExtension}`
+ );
  if (openAIOptions) {
    const transcriptionModelOptions = {
      include: openAIOptions.include,
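
The change above means the uploaded form part now carries a filename with an extension derived from the audio's media type (for example audio.mp3 for audio/mpeg) instead of the bare name "audio", so the endpoint can infer the container format. A minimal usage sketch, assuming the AI SDK's experimental_transcribe helper and the whisper-1 model id (both assumptions, not part of this diff):

// Sketch only; the media-type-to-extension mapping comes from provider-utils'
// mediaTypeToExtension and is not spelled out in this diff.
import { experimental_transcribe as transcribe } from "ai";
import { openai } from "@ai-sdk/openai";
import { readFile } from "node:fs/promises";

const result = await transcribe({
  model: openai.transcription("whisper-1"),
  audio: await readFile("speech.mp3"), // Uint8Array, sent as multipart form data
});
// With 2.0.24 the file part is named e.g. "audio.mp3" rather than "audio".
console.log(result.text);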
@@ -2783,6 +2788,7 @@ var OpenAIResponsesLanguageModel = class {
      })
    ])
  ),
+ service_tier: import_v416.z.string().nullish(),
  incomplete_details: import_v416.z.object({ reason: import_v416.z.string() }).nullable(),
  usage: usageSchema2
})
@@ -2941,6 +2947,9 @@ var OpenAIResponsesLanguageModel = class {
  if (logprobs.length > 0) {
    providerMetadata.openai.logprobs = logprobs;
  }
+ if (typeof response.service_tier === "string") {
+   providerMetadata.openai.serviceTier = response.service_tier;
+ }
  return {
    content,
    finishReason: mapOpenAIResponseFinishReason({
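
With the hunks above, a service_tier string returned by the Responses API is surfaced as providerMetadata.openai.serviceTier on non-streaming results. A minimal sketch of reading it, assuming generateText from the ai package and the openai.responses(...) factory; the model id is only an example:

// Sketch only: reads the metadata field added in this release.
import { generateText } from "ai";
import { openai } from "@ai-sdk/openai";

const { text, providerMetadata } = await generateText({
  model: openai.responses("gpt-4o-mini"),
  prompt: "Say hello.",
});

// Only set when the API reports a service tier (e.g. "default" or "flex").
console.log(providerMetadata?.openai?.serviceTier);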
@@ -2997,6 +3006,7 @@ var OpenAIResponsesLanguageModel = class {
  const ongoingToolCalls = {};
  let hasToolCalls = false;
  const activeReasoning = {};
+ let serviceTier;
  return {
    stream: response.pipeThrough(
      new TransformStream({
@@ -3255,6 +3265,9 @@ var OpenAIResponsesLanguageModel = class {
    usage.totalTokens = value.response.usage.input_tokens + value.response.usage.output_tokens;
    usage.reasoningTokens = (_j = (_i = value.response.usage.output_tokens_details) == null ? void 0 : _i.reasoning_tokens) != null ? _j : void 0;
    usage.cachedInputTokens = (_l = (_k = value.response.usage.input_tokens_details) == null ? void 0 : _k.cached_tokens) != null ? _l : void 0;
+   if (typeof value.response.service_tier === "string") {
+     serviceTier = value.response.service_tier;
+   }
  } else if (isResponseAnnotationAddedChunk(value)) {
    if (value.annotation.type === "url_citation") {
      controller.enqueue({
@@ -3287,6 +3300,9 @@ var OpenAIResponsesLanguageModel = class {
  if (logprobs.length > 0) {
    providerMetadata.openai.logprobs = logprobs;
  }
+ if (serviceTier !== void 0) {
+   providerMetadata.openai.serviceTier = serviceTier;
+ }
  controller.enqueue({
    type: "finish",
    finishReason,
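
The streaming path mirrors this: service_tier is captured from the response.completed / response.incomplete chunk and attached to the provider metadata emitted with the finish part. A sketch, assuming streamText from the ai package, whose providerMetadata promise resolves once the stream finishes:

// Sketch only: same metadata, streaming variant.
import { streamText } from "ai";
import { openai } from "@ai-sdk/openai";

const result = streamText({
  model: openai.responses("gpt-4o-mini"),
  prompt: "Say hello.",
});

for await (const delta of result.textStream) {
  process.stdout.write(delta);
}

// Resolves after the finish part, so serviceTier is available here if reported.
const metadata = await result.providerMetadata;
console.log(metadata?.openai?.serviceTier);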
@@ -3324,7 +3340,8 @@ var responseFinishedChunkSchema = import_v416.z.object({
  type: import_v416.z.enum(["response.completed", "response.incomplete"]),
  response: import_v416.z.object({
    incomplete_details: import_v416.z.object({ reason: import_v416.z.string() }).nullish(),
-   usage: usageSchema2
+   usage: usageSchema2,
+   service_tier: import_v416.z.string().nullish()
  })
});
var responseCreatedChunkSchema = import_v416.z.object({
@@ -3332,7 +3349,8 @@ var responseCreatedChunkSchema = import_v416.z.object({
  response: import_v416.z.object({
    id: import_v416.z.string(),
    created_at: import_v416.z.number(),
-   model: import_v416.z.string()
+   model: import_v416.z.string(),
+   service_tier: import_v416.z.string().nullish()
  })
});
var responseOutputItemAddedSchema = import_v416.z.object({