@ai-sdk/openai 2.0.0-canary.2 → 2.0.0-canary.4

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.js CHANGED
@@ -26,16 +26,15 @@ __export(src_exports, {
  module.exports = __toCommonJS(src_exports);

  // src/openai-provider.ts
- var import_provider_utils9 = require("@ai-sdk/provider-utils");
+ var import_provider_utils8 = require("@ai-sdk/provider-utils");

  // src/openai-chat-language-model.ts
  var import_provider3 = require("@ai-sdk/provider");
- var import_provider_utils3 = require("@ai-sdk/provider-utils");
+ var import_provider_utils2 = require("@ai-sdk/provider-utils");
  var import_zod2 = require("zod");

  // src/convert-to-openai-chat-messages.ts
  var import_provider = require("@ai-sdk/provider");
- var import_provider_utils = require("@ai-sdk/provider-utils");
  function convertToOpenAIChatMessages({
  prompt,
  useLegacyFunctionCalling = false,
@@ -79,55 +78,65 @@ function convertToOpenAIChatMessages({
  messages.push({
  role: "user",
  content: content.map((part, index) => {
- var _a, _b, _c, _d;
+ var _a, _b, _c;
  switch (part.type) {
  case "text": {
  return { type: "text", text: part.text };
  }
- case "image": {
- return {
- type: "image_url",
- image_url: {
- url: part.image instanceof URL ? part.image.toString() : `data:${(_a = part.mimeType) != null ? _a : "image/jpeg"};base64,${(0, import_provider_utils.convertUint8ArrayToBase64)(part.image)}`,
- // OpenAI specific extension: image detail
- detail: (_c = (_b = part.providerOptions) == null ? void 0 : _b.openai) == null ? void 0 : _c.imageDetail
- }
- };
- }
  case "file": {
- if (part.data instanceof URL) {
- throw new import_provider.UnsupportedFunctionalityError({
- functionality: "'File content parts with URL data' functionality not supported."
- });
- }
- switch (part.mimeType) {
- case "audio/wav": {
- return {
- type: "input_audio",
- input_audio: { data: part.data, format: "wav" }
- };
- }
- case "audio/mp3":
- case "audio/mpeg": {
- return {
- type: "input_audio",
- input_audio: { data: part.data, format: "mp3" }
- };
+ if (part.mediaType.startsWith("image/")) {
+ const mediaType = part.mediaType === "image/*" ? "image/jpeg" : part.mediaType;
+ return {
+ type: "image_url",
+ image_url: {
+ url: part.data instanceof URL ? part.data.toString() : `data:${mediaType};base64,${part.data}`,
+ // OpenAI specific extension: image detail
+ detail: (_b = (_a = part.providerOptions) == null ? void 0 : _a.openai) == null ? void 0 : _b.imageDetail
+ }
+ };
+ } else if (part.mediaType.startsWith("audio/")) {
+ if (part.data instanceof URL) {
+ throw new import_provider.UnsupportedFunctionalityError({
+ functionality: "audio file parts with URLs"
+ });
  }
- case "application/pdf": {
- return {
- type: "file",
- file: {
- filename: (_d = part.filename) != null ? _d : `part-${index}.pdf`,
- file_data: `data:application/pdf;base64,${part.data}`
- }
- };
+ switch (part.mediaType) {
+ case "audio/wav": {
+ return {
+ type: "input_audio",
+ input_audio: { data: part.data, format: "wav" }
+ };
+ }
+ case "audio/mp3":
+ case "audio/mpeg": {
+ return {
+ type: "input_audio",
+ input_audio: { data: part.data, format: "mp3" }
+ };
+ }
+ default: {
+ throw new import_provider.UnsupportedFunctionalityError({
+ functionality: `audio content parts with media type ${part.mediaType}`
+ });
+ }
  }
- default: {
+ } else if (part.mediaType === "application/pdf") {
+ if (part.data instanceof URL) {
  throw new import_provider.UnsupportedFunctionalityError({
- functionality: `File content part type ${part.mimeType} in user messages`
+ functionality: "PDF file parts with URLs"
  });
  }
+ return {
+ type: "file",
+ file: {
+ filename: (_c = part.filename) != null ? _c : `part-${index}.pdf`,
+ file_data: `data:application/pdf;base64,${part.data}`
+ }
+ };
+ } else {
+ throw new import_provider.UnsupportedFunctionalityError({
+ functionality: `file part media type ${part.mediaType}`
+ });
  }
  }
  }
@@ -236,7 +245,7 @@ function mapOpenAIFinishReason(finishReason) {

  // src/openai-error.ts
  var import_zod = require("zod");
- var import_provider_utils2 = require("@ai-sdk/provider-utils");
+ var import_provider_utils = require("@ai-sdk/provider-utils");
  var openaiErrorDataSchema = import_zod.z.object({
  error: import_zod.z.object({
  message: import_zod.z.string(),
@@ -248,7 +257,7 @@ var openaiErrorDataSchema = import_zod.z.object({
  code: import_zod.z.union([import_zod.z.string(), import_zod.z.number()]).nullish()
  })
  });
- var openaiFailedResponseHandler = (0, import_provider_utils2.createJsonErrorResponseHandler)({
+ var openaiFailedResponseHandler = (0, import_provider_utils.createJsonErrorResponseHandler)({
  errorSchema: openaiErrorDataSchema,
  errorToMessage: (data) => data.error.message
  });
@@ -360,7 +369,7 @@ function prepareTools({
  default: {
  const _exhaustiveCheck = type;
  throw new import_provider2.UnsupportedFunctionalityError({
- functionality: `Unsupported tool choice type: ${_exhaustiveCheck}`
+ functionality: `tool choice type: ${_exhaustiveCheck}`
  });
  }
  }
@@ -567,15 +576,15 @@ var OpenAIChatLanguageModel = class {
  responseHeaders,
  value: response,
  rawValue: rawResponse
- } = await (0, import_provider_utils3.postJsonToApi)({
+ } = await (0, import_provider_utils2.postJsonToApi)({
  url: this.config.url({
  path: "/chat/completions",
  modelId: this.modelId
  }),
- headers: (0, import_provider_utils3.combineHeaders)(this.config.headers(), options.headers),
+ headers: (0, import_provider_utils2.combineHeaders)(this.config.headers(), options.headers),
  body,
  failedResponseHandler: openaiFailedResponseHandler,
- successfulResponseHandler: (0, import_provider_utils3.createJsonResponseHandler)(
+ successfulResponseHandler: (0, import_provider_utils2.createJsonResponseHandler)(
  openaiChatResponseSchema
  ),
  abortSignal: options.abortSignal,
@@ -603,7 +612,7 @@ var OpenAIChatLanguageModel = class {
  toolCalls: this.settings.useLegacyFunctionCalling && choice.message.function_call ? [
  {
  toolCallType: "function",
- toolCallId: (0, import_provider_utils3.generateId)(),
+ toolCallId: (0, import_provider_utils2.generateId)(),
  toolName: choice.message.function_call.name,
  args: choice.message.function_call.arguments
  }
@@ -611,7 +620,7 @@ var OpenAIChatLanguageModel = class {
  var _a2;
  return {
  toolCallType: "function",
- toolCallId: (_a2 = toolCall.id) != null ? _a2 : (0, import_provider_utils3.generateId)(),
+ toolCallId: (_a2 = toolCall.id) != null ? _a2 : (0, import_provider_utils2.generateId)(),
  toolName: toolCall.function.name,
  args: toolCall.function.arguments
  };
@@ -622,9 +631,12 @@ var OpenAIChatLanguageModel = class {
  completionTokens: (_h = (_g = response.usage) == null ? void 0 : _g.completion_tokens) != null ? _h : NaN
  },
  rawCall: { rawPrompt, rawSettings },
- rawResponse: { headers: responseHeaders, body: rawResponse },
  request: { body: JSON.stringify(body) },
- response: getResponseMetadata(response),
+ response: {
+ ...getResponseMetadata(response),
+ headers: responseHeaders,
+ body: rawResponse
+ },
  warnings,
  logprobs: mapOpenAIChatLogProbsOutput(choice.logprobs),
  providerMetadata
@@ -670,7 +682,7 @@ var OpenAIChatLanguageModel = class {
  return {
  stream: simulatedStream,
  rawCall: result.rawCall,
- rawResponse: result.rawResponse,
+ response: result.response,
  warnings: result.warnings
  };
  }
@@ -681,15 +693,15 @@ var OpenAIChatLanguageModel = class {
  // only include stream_options when in strict compatibility mode:
  stream_options: this.config.compatibility === "strict" ? { include_usage: true } : void 0
  };
- const { responseHeaders, value: response } = await (0, import_provider_utils3.postJsonToApi)({
+ const { responseHeaders, value: response } = await (0, import_provider_utils2.postJsonToApi)({
  url: this.config.url({
  path: "/chat/completions",
  modelId: this.modelId
  }),
- headers: (0, import_provider_utils3.combineHeaders)(this.config.headers(), options.headers),
+ headers: (0, import_provider_utils2.combineHeaders)(this.config.headers(), options.headers),
  body,
  failedResponseHandler: openaiFailedResponseHandler,
- successfulResponseHandler: (0, import_provider_utils3.createEventSourceResponseHandler)(
+ successfulResponseHandler: (0, import_provider_utils2.createEventSourceResponseHandler)(
  openaiChatChunkSchema
  ),
  abortSignal: options.abortSignal,
@@ -777,7 +789,7 @@ var OpenAIChatLanguageModel = class {
  const mappedToolCalls = useLegacyFunctionCalling && delta.function_call != null ? [
  {
  type: "function",
- id: (0, import_provider_utils3.generateId)(),
+ id: (0, import_provider_utils2.generateId)(),
  function: delta.function_call,
  index: 0
  }
@@ -824,11 +836,11 @@ var OpenAIChatLanguageModel = class {
  argsTextDelta: toolCall2.function.arguments
  });
  }
- if ((0, import_provider_utils3.isParsableJson)(toolCall2.function.arguments)) {
+ if ((0, import_provider_utils2.isParsableJson)(toolCall2.function.arguments)) {
  controller.enqueue({
  type: "tool-call",
  toolCallType: "function",
- toolCallId: (_e = toolCall2.id) != null ? _e : (0, import_provider_utils3.generateId)(),
+ toolCallId: (_e = toolCall2.id) != null ? _e : (0, import_provider_utils2.generateId)(),
  toolName: toolCall2.function.name,
  args: toolCall2.function.arguments
  });
@@ -851,11 +863,11 @@ var OpenAIChatLanguageModel = class {
  toolName: toolCall.function.name,
  argsTextDelta: (_i = toolCallDelta.function.arguments) != null ? _i : ""
  });
- if (((_j = toolCall.function) == null ? void 0 : _j.name) != null && ((_k = toolCall.function) == null ? void 0 : _k.arguments) != null && (0, import_provider_utils3.isParsableJson)(toolCall.function.arguments)) {
+ if (((_j = toolCall.function) == null ? void 0 : _j.name) != null && ((_k = toolCall.function) == null ? void 0 : _k.arguments) != null && (0, import_provider_utils2.isParsableJson)(toolCall.function.arguments)) {
  controller.enqueue({
  type: "tool-call",
  toolCallType: "function",
- toolCallId: (_l = toolCall.id) != null ? _l : (0, import_provider_utils3.generateId)(),
+ toolCallId: (_l = toolCall.id) != null ? _l : (0, import_provider_utils2.generateId)(),
  toolName: toolCall.function.name,
  args: toolCall.function.arguments
  });
@@ -880,7 +892,7 @@ var OpenAIChatLanguageModel = class {
  })
  ),
  rawCall: { rawPrompt, rawSettings },
- rawResponse: { headers: responseHeaders },
+ response: { headers: responseHeaders },
  request: { body: JSON.stringify(body) },
  warnings
  };
@@ -1025,7 +1037,7 @@ var reasoningModels = {
  };

  // src/openai-completion-language-model.ts
- var import_provider_utils4 = require("@ai-sdk/provider-utils");
+ var import_provider_utils3 = require("@ai-sdk/provider-utils");
  var import_zod3 = require("zod");

  // src/convert-to-openai-completion-prompt.ts
@@ -1060,13 +1072,8 @@ function convertToOpenAICompletionPrompt({
  case "text": {
  return part.text;
  }
- case "image": {
- throw new import_provider4.UnsupportedFunctionalityError({
- functionality: "images"
- });
- }
  }
- }).join("");
+ }).filter(Boolean).join("");
  text += `${user}:
  ${userMessage}

@@ -1203,15 +1210,15 @@ var OpenAICompletionLanguageModel = class {
  responseHeaders,
  value: response,
  rawValue: rawResponse
- } = await (0, import_provider_utils4.postJsonToApi)({
+ } = await (0, import_provider_utils3.postJsonToApi)({
  url: this.config.url({
  path: "/completions",
  modelId: this.modelId
  }),
- headers: (0, import_provider_utils4.combineHeaders)(this.config.headers(), options.headers),
+ headers: (0, import_provider_utils3.combineHeaders)(this.config.headers(), options.headers),
  body: args,
  failedResponseHandler: openaiFailedResponseHandler,
- successfulResponseHandler: (0, import_provider_utils4.createJsonResponseHandler)(
+ successfulResponseHandler: (0, import_provider_utils3.createJsonResponseHandler)(
  openaiCompletionResponseSchema
  ),
  abortSignal: options.abortSignal,
@@ -1228,10 +1235,13 @@ var OpenAICompletionLanguageModel = class {
  finishReason: mapOpenAIFinishReason(choice.finish_reason),
  logprobs: mapOpenAICompletionLogProbs(choice.logprobs),
  rawCall: { rawPrompt, rawSettings },
- rawResponse: { headers: responseHeaders, body: rawResponse },
- response: getResponseMetadata(response),
- warnings,
- request: { body: JSON.stringify(args) }
+ request: { body: JSON.stringify(args) },
+ response: {
+ ...getResponseMetadata(response),
+ headers: responseHeaders,
+ body: rawResponse
+ },
+ warnings
  };
  }
  async doStream(options) {
@@ -1242,15 +1252,15 @@ var OpenAICompletionLanguageModel = class {
  // only include stream_options when in strict compatibility mode:
  stream_options: this.config.compatibility === "strict" ? { include_usage: true } : void 0
  };
- const { responseHeaders, value: response } = await (0, import_provider_utils4.postJsonToApi)({
+ const { responseHeaders, value: response } = await (0, import_provider_utils3.postJsonToApi)({
  url: this.config.url({
  path: "/completions",
  modelId: this.modelId
  }),
- headers: (0, import_provider_utils4.combineHeaders)(this.config.headers(), options.headers),
+ headers: (0, import_provider_utils3.combineHeaders)(this.config.headers(), options.headers),
  body,
  failedResponseHandler: openaiFailedResponseHandler,
- successfulResponseHandler: (0, import_provider_utils4.createEventSourceResponseHandler)(
+ successfulResponseHandler: (0, import_provider_utils3.createEventSourceResponseHandler)(
  openaiCompletionChunkSchema
  ),
  abortSignal: options.abortSignal,
@@ -1321,7 +1331,7 @@ var OpenAICompletionLanguageModel = class {
  })
  ),
  rawCall: { rawPrompt, rawSettings },
- rawResponse: { headers: responseHeaders },
+ response: { headers: responseHeaders },
  warnings,
  request: { body: JSON.stringify(body) }
  };
@@ -1374,7 +1384,7 @@ var openaiCompletionChunkSchema = import_zod3.z.union([

  // src/openai-embedding-model.ts
  var import_provider5 = require("@ai-sdk/provider");
- var import_provider_utils5 = require("@ai-sdk/provider-utils");
+ var import_provider_utils4 = require("@ai-sdk/provider-utils");
  var import_zod4 = require("zod");
  var OpenAIEmbeddingModel = class {
  constructor(modelId, settings, config) {
@@ -1407,12 +1417,12 @@ var OpenAIEmbeddingModel = class {
  values
  });
  }
- const { responseHeaders, value: response } = await (0, import_provider_utils5.postJsonToApi)({
+ const { responseHeaders, value: response } = await (0, import_provider_utils4.postJsonToApi)({
  url: this.config.url({
  path: "/embeddings",
  modelId: this.modelId
  }),
- headers: (0, import_provider_utils5.combineHeaders)(this.config.headers(), headers),
+ headers: (0, import_provider_utils4.combineHeaders)(this.config.headers(), headers),
  body: {
  model: this.modelId,
  input: values,
@@ -1421,7 +1431,7 @@ var OpenAIEmbeddingModel = class {
  user: this.settings.user
  },
  failedResponseHandler: openaiFailedResponseHandler,
- successfulResponseHandler: (0, import_provider_utils5.createJsonResponseHandler)(
+ successfulResponseHandler: (0, import_provider_utils4.createJsonResponseHandler)(
  openaiTextEmbeddingResponseSchema
  ),
  abortSignal,
@@ -1440,7 +1450,7 @@ var openaiTextEmbeddingResponseSchema = import_zod4.z.object({
  });

  // src/openai-image-model.ts
- var import_provider_utils6 = require("@ai-sdk/provider-utils");
+ var import_provider_utils5 = require("@ai-sdk/provider-utils");
  var import_zod5 = require("zod");

  // src/openai-image-settings.ts
@@ -1487,12 +1497,12 @@ var OpenAIImageModel = class {
  warnings.push({ type: "unsupported-setting", setting: "seed" });
  }
  const currentDate = (_c = (_b = (_a = this.config._internal) == null ? void 0 : _a.currentDate) == null ? void 0 : _b.call(_a)) != null ? _c : /* @__PURE__ */ new Date();
- const { value: response, responseHeaders } = await (0, import_provider_utils6.postJsonToApi)({
+ const { value: response, responseHeaders } = await (0, import_provider_utils5.postJsonToApi)({
  url: this.config.url({
  path: "/images/generations",
  modelId: this.modelId
  }),
- headers: (0, import_provider_utils6.combineHeaders)(this.config.headers(), headers),
+ headers: (0, import_provider_utils5.combineHeaders)(this.config.headers(), headers),
  body: {
  model: this.modelId,
  prompt,
@@ -1502,7 +1512,7 @@ var OpenAIImageModel = class {
  response_format: "b64_json"
  },
  failedResponseHandler: openaiFailedResponseHandler,
- successfulResponseHandler: (0, import_provider_utils6.createJsonResponseHandler)(
+ successfulResponseHandler: (0, import_provider_utils5.createJsonResponseHandler)(
  openaiImageResponseSchema
  ),
  abortSignal,
@@ -1544,13 +1554,186 @@ var openaiTools = {
  webSearchPreview: webSearchPreviewTool
  };

- // src/responses/openai-responses-language-model.ts
- var import_provider_utils8 = require("@ai-sdk/provider-utils");
+ // src/openai-transcription-model.ts
+ var import_provider_utils6 = require("@ai-sdk/provider-utils");
  var import_zod7 = require("zod");
+ var OpenAIProviderOptionsSchema = import_zod7.z.object({
+ include: import_zod7.z.array(import_zod7.z.string()).optional().describe(
+ "Additional information to include in the transcription response."
+ ),
+ language: import_zod7.z.string().optional().describe("The language of the input audio in ISO-639-1 format."),
+ prompt: import_zod7.z.string().optional().describe(
+ "An optional text to guide the model's style or continue a previous audio segment."
+ ),
+ temperature: import_zod7.z.number().min(0).max(1).optional().default(0).describe("The sampling temperature, between 0 and 1."),
+ timestampGranularities: import_zod7.z.array(import_zod7.z.enum(["word", "segment"])).optional().default(["segment"]).describe(
+ "The timestamp granularities to populate for this transcription."
+ )
+ });
+ var languageMap = {
+ afrikaans: "af",
+ arabic: "ar",
+ armenian: "hy",
+ azerbaijani: "az",
+ belarusian: "be",
+ bosnian: "bs",
+ bulgarian: "bg",
+ catalan: "ca",
+ chinese: "zh",
+ croatian: "hr",
+ czech: "cs",
+ danish: "da",
+ dutch: "nl",
+ english: "en",
+ estonian: "et",
+ finnish: "fi",
+ french: "fr",
+ galician: "gl",
+ german: "de",
+ greek: "el",
+ hebrew: "he",
+ hindi: "hi",
+ hungarian: "hu",
+ icelandic: "is",
+ indonesian: "id",
+ italian: "it",
+ japanese: "ja",
+ kannada: "kn",
+ kazakh: "kk",
+ korean: "ko",
+ latvian: "lv",
+ lithuanian: "lt",
+ macedonian: "mk",
+ malay: "ms",
+ marathi: "mr",
+ maori: "mi",
+ nepali: "ne",
+ norwegian: "no",
+ persian: "fa",
+ polish: "pl",
+ portuguese: "pt",
+ romanian: "ro",
+ russian: "ru",
+ serbian: "sr",
+ slovak: "sk",
+ slovenian: "sl",
+ spanish: "es",
+ swahili: "sw",
+ swedish: "sv",
+ tagalog: "tl",
+ tamil: "ta",
+ thai: "th",
+ turkish: "tr",
+ ukrainian: "uk",
+ urdu: "ur",
+ vietnamese: "vi",
+ welsh: "cy"
+ };
+ var OpenAITranscriptionModel = class {
+ constructor(modelId, config) {
+ this.modelId = modelId;
+ this.config = config;
+ this.specificationVersion = "v1";
+ }
+ get provider() {
+ return this.config.provider;
+ }
+ getArgs({
+ audio,
+ mediaType,
+ providerOptions
+ }) {
+ const warnings = [];
+ const openAIOptions = (0, import_provider_utils6.parseProviderOptions)({
+ provider: "openai",
+ providerOptions,
+ schema: OpenAIProviderOptionsSchema
+ });
+ const formData = new FormData();
+ const blob = audio instanceof Uint8Array ? new Blob([audio]) : new Blob([(0, import_provider_utils6.convertBase64ToUint8Array)(audio)]);
+ formData.append("model", this.modelId);
+ formData.append("file", new File([blob], "audio", { type: mediaType }));
+ if (openAIOptions) {
+ const transcriptionModelOptions = {
+ include: openAIOptions.include,
+ language: openAIOptions.language,
+ prompt: openAIOptions.prompt,
+ temperature: openAIOptions.temperature,
+ timestamp_granularities: openAIOptions.timestampGranularities
+ };
+ for (const key in transcriptionModelOptions) {
+ const value = transcriptionModelOptions[key];
+ if (value !== void 0) {
+ formData.append(key, value);
+ }
+ }
+ }
+ return {
+ formData,
+ warnings
+ };
+ }
+ async doGenerate(options) {
+ var _a, _b, _c, _d, _e, _f;
+ const currentDate = (_c = (_b = (_a = this.config._internal) == null ? void 0 : _a.currentDate) == null ? void 0 : _b.call(_a)) != null ? _c : /* @__PURE__ */ new Date();
+ const { formData, warnings } = this.getArgs(options);
+ const {
+ value: response,
+ responseHeaders,
+ rawValue: rawResponse
+ } = await (0, import_provider_utils6.postFormDataToApi)({
+ url: this.config.url({
+ path: "/audio/transcriptions",
+ modelId: this.modelId
+ }),
+ headers: (0, import_provider_utils6.combineHeaders)(this.config.headers(), options.headers),
+ formData,
+ failedResponseHandler: openaiFailedResponseHandler,
+ successfulResponseHandler: (0, import_provider_utils6.createJsonResponseHandler)(
+ openaiTranscriptionResponseSchema
+ ),
+ abortSignal: options.abortSignal,
+ fetch: this.config.fetch
+ });
+ const language = response.language != null && response.language in languageMap ? languageMap[response.language] : void 0;
+ return {
+ text: response.text,
+ segments: (_e = (_d = response.words) == null ? void 0 : _d.map((word) => ({
+ text: word.word,
+ startSecond: word.start,
+ endSecond: word.end
+ }))) != null ? _e : [],
+ language,
+ durationInSeconds: (_f = response.duration) != null ? _f : void 0,
+ warnings,
+ response: {
+ timestamp: currentDate,
+ modelId: this.modelId,
+ headers: responseHeaders,
+ body: rawResponse
+ }
+ };
+ }
+ };
+ var openaiTranscriptionResponseSchema = import_zod7.z.object({
+ text: import_zod7.z.string(),
+ language: import_zod7.z.string().nullish(),
+ duration: import_zod7.z.number().nullish(),
+ words: import_zod7.z.array(
+ import_zod7.z.object({
+ word: import_zod7.z.string(),
+ start: import_zod7.z.number(),
+ end: import_zod7.z.number()
+ })
+ ).nullish()
+ });
+
+ // src/responses/openai-responses-language-model.ts
+ var import_provider_utils7 = require("@ai-sdk/provider-utils");
+ var import_zod8 = require("zod");

  // src/responses/convert-to-openai-responses-messages.ts
  var import_provider6 = require("@ai-sdk/provider");
- var import_provider_utils7 = require("@ai-sdk/provider-utils");
  function convertToOpenAIResponsesMessages({
  prompt,
  systemMessageMode
@@ -1589,38 +1772,35 @@ function convertToOpenAIResponsesMessages({
  messages.push({
  role: "user",
  content: content.map((part, index) => {
- var _a, _b, _c, _d;
+ var _a, _b, _c;
  switch (part.type) {
  case "text": {
  return { type: "input_text", text: part.text };
  }
- case "image": {
- return {
- type: "input_image",
- image_url: part.image instanceof URL ? part.image.toString() : `data:${(_a = part.mimeType) != null ? _a : "image/jpeg"};base64,${(0, import_provider_utils7.convertUint8ArrayToBase64)(part.image)}`,
- // OpenAI specific extension: image detail
- detail: (_c = (_b = part.providerOptions) == null ? void 0 : _b.openai) == null ? void 0 : _c.imageDetail
- };
- }
  case "file": {
- if (part.data instanceof URL) {
- throw new import_provider6.UnsupportedFunctionalityError({
- functionality: "File URLs in user messages"
- });
- }
- switch (part.mimeType) {
- case "application/pdf": {
- return {
- type: "input_file",
- filename: (_d = part.filename) != null ? _d : `part-${index}.pdf`,
- file_data: `data:application/pdf;base64,${part.data}`
- };
- }
- default: {
+ if (part.mediaType.startsWith("image/")) {
+ const mediaType = part.mediaType === "image/*" ? "image/jpeg" : part.mediaType;
+ return {
+ type: "input_image",
+ image_url: part.data instanceof URL ? part.data.toString() : `data:${mediaType};base64,${part.data}`,
+ // OpenAI specific extension: image detail
+ detail: (_b = (_a = part.providerOptions) == null ? void 0 : _a.openai) == null ? void 0 : _b.imageDetail
+ };
+ } else if (part.mediaType === "application/pdf") {
+ if (part.data instanceof URL) {
  throw new import_provider6.UnsupportedFunctionalityError({
- functionality: "Only PDF files are supported in user messages"
+ functionality: "PDF file parts with URLs"
  });
  }
+ return {
+ type: "input_file",
+ filename: (_c = part.filename) != null ? _c : `part-${index}.pdf`,
+ file_data: `data:application/pdf;base64,${part.data}`
+ };
+ } else {
+ throw new import_provider6.UnsupportedFunctionalityError({
+ functionality: `file part media type ${part.mediaType}`
+ });
  }
  }
  }
@@ -1749,7 +1929,7 @@ function prepareResponsesTools({
  default: {
  const _exhaustiveCheck = type;
  throw new import_provider7.UnsupportedFunctionalityError({
- functionality: `Unsupported tool choice type: ${_exhaustiveCheck}`
+ functionality: `tool choice type: ${_exhaustiveCheck}`
  });
  }
  }
@@ -1810,7 +1990,7 @@ var OpenAIResponsesLanguageModel = class {
  systemMessageMode: modelConfig.systemMessageMode
  });
  warnings.push(...messageWarnings);
- const openaiOptions = (0, import_provider_utils8.parseProviderOptions)({
+ const openaiOptions = (0, import_provider_utils7.parseProviderOptions)({
  provider: "openai",
  providerOptions,
  schema: openaiResponsesProviderOptionsSchema
@@ -1891,58 +2071,58 @@ var OpenAIResponsesLanguageModel = class {
  responseHeaders,
  value: response,
  rawValue: rawResponse
- } = await (0, import_provider_utils8.postJsonToApi)({
+ } = await (0, import_provider_utils7.postJsonToApi)({
  url: this.config.url({
  path: "/responses",
  modelId: this.modelId
  }),
- headers: (0, import_provider_utils8.combineHeaders)(this.config.headers(), options.headers),
+ headers: (0, import_provider_utils7.combineHeaders)(this.config.headers(), options.headers),
  body,
  failedResponseHandler: openaiFailedResponseHandler,
- successfulResponseHandler: (0, import_provider_utils8.createJsonResponseHandler)(
- import_zod7.z.object({
- id: import_zod7.z.string(),
- created_at: import_zod7.z.number(),
- model: import_zod7.z.string(),
- output: import_zod7.z.array(
- import_zod7.z.discriminatedUnion("type", [
- import_zod7.z.object({
- type: import_zod7.z.literal("message"),
- role: import_zod7.z.literal("assistant"),
- content: import_zod7.z.array(
- import_zod7.z.object({
- type: import_zod7.z.literal("output_text"),
- text: import_zod7.z.string(),
- annotations: import_zod7.z.array(
- import_zod7.z.object({
- type: import_zod7.z.literal("url_citation"),
- start_index: import_zod7.z.number(),
- end_index: import_zod7.z.number(),
- url: import_zod7.z.string(),
- title: import_zod7.z.string()
+ successfulResponseHandler: (0, import_provider_utils7.createJsonResponseHandler)(
+ import_zod8.z.object({
+ id: import_zod8.z.string(),
+ created_at: import_zod8.z.number(),
+ model: import_zod8.z.string(),
+ output: import_zod8.z.array(
+ import_zod8.z.discriminatedUnion("type", [
+ import_zod8.z.object({
+ type: import_zod8.z.literal("message"),
+ role: import_zod8.z.literal("assistant"),
+ content: import_zod8.z.array(
+ import_zod8.z.object({
+ type: import_zod8.z.literal("output_text"),
+ text: import_zod8.z.string(),
+ annotations: import_zod8.z.array(
+ import_zod8.z.object({
+ type: import_zod8.z.literal("url_citation"),
+ start_index: import_zod8.z.number(),
+ end_index: import_zod8.z.number(),
+ url: import_zod8.z.string(),
+ title: import_zod8.z.string()
  })
  )
  })
  )
  }),
- import_zod7.z.object({
- type: import_zod7.z.literal("function_call"),
- call_id: import_zod7.z.string(),
- name: import_zod7.z.string(),
- arguments: import_zod7.z.string()
+ import_zod8.z.object({
+ type: import_zod8.z.literal("function_call"),
+ call_id: import_zod8.z.string(),
+ name: import_zod8.z.string(),
+ arguments: import_zod8.z.string()
  }),
- import_zod7.z.object({
- type: import_zod7.z.literal("web_search_call")
+ import_zod8.z.object({
+ type: import_zod8.z.literal("web_search_call")
  }),
- import_zod7.z.object({
- type: import_zod7.z.literal("computer_call")
+ import_zod8.z.object({
+ type: import_zod8.z.literal("computer_call")
  }),
- import_zod7.z.object({
- type: import_zod7.z.literal("reasoning")
+ import_zod8.z.object({
+ type: import_zod8.z.literal("reasoning")
  })
  ])
  ),
- incomplete_details: import_zod7.z.object({ reason: import_zod7.z.string() }).nullable(),
+ incomplete_details: import_zod8.z.object({ reason: import_zod8.z.string() }).nullable(),
  usage: usageSchema
  })
  ),
@@ -1963,7 +2143,7 @@ var OpenAIResponsesLanguageModel = class {
  var _a2, _b2, _c2;
  return {
  sourceType: "url",
- id: (_c2 = (_b2 = (_a2 = this.config).generateId) == null ? void 0 : _b2.call(_a2)) != null ? _c2 : (0, import_provider_utils8.generateId)(),
+ id: (_c2 = (_b2 = (_a2 = this.config).generateId) == null ? void 0 : _b2.call(_a2)) != null ? _c2 : (0, import_provider_utils7.generateId)(),
  url: annotation.url,
  title: annotation.title
  };
@@ -1982,17 +2162,15 @@ var OpenAIResponsesLanguageModel = class {
  rawPrompt: void 0,
  rawSettings: {}
  },
- rawResponse: {
- headers: responseHeaders,
- body: rawResponse
- },
  request: {
  body: JSON.stringify(body)
  },
  response: {
  id: response.id,
  timestamp: new Date(response.created_at * 1e3),
- modelId: response.model
+ modelId: response.model,
+ headers: responseHeaders,
+ body: rawResponse
  },
  providerMetadata: {
  openai: {
@@ -2006,18 +2184,18 @@ var OpenAIResponsesLanguageModel = class {
  }
  async doStream(options) {
  const { args: body, warnings } = this.getArgs(options);
- const { responseHeaders, value: response } = await (0, import_provider_utils8.postJsonToApi)({
+ const { responseHeaders, value: response } = await (0, import_provider_utils7.postJsonToApi)({
  url: this.config.url({
  path: "/responses",
  modelId: this.modelId
  }),
- headers: (0, import_provider_utils8.combineHeaders)(this.config.headers(), options.headers),
+ headers: (0, import_provider_utils7.combineHeaders)(this.config.headers(), options.headers),
  body: {
  ...body,
  stream: true
  },
  failedResponseHandler: openaiFailedResponseHandler,
- successfulResponseHandler: (0, import_provider_utils8.createEventSourceResponseHandler)(
+ successfulResponseHandler: (0, import_provider_utils7.createEventSourceResponseHandler)(
  openaiResponsesChunkSchema
  ),
  abortSignal: options.abortSignal,
@@ -2105,7 +2283,7 @@ var OpenAIResponsesLanguageModel = class {
  type: "source",
  source: {
  sourceType: "url",
- id: (_h = (_g = (_f = self.config).generateId) == null ? void 0 : _g.call(_f)) != null ? _h : (0, import_provider_utils8.generateId)(),
+ id: (_h = (_g = (_f = self.config).generateId) == null ? void 0 : _g.call(_f)) != null ? _h : (0, import_provider_utils7.generateId)(),
  url: value.annotation.url,
  title: value.annotation.title
  }
@@ -2134,85 +2312,85 @@ var OpenAIResponsesLanguageModel = class {
  rawPrompt: void 0,
  rawSettings: {}
  },
- rawResponse: { headers: responseHeaders },
  request: { body: JSON.stringify(body) },
+ response: { headers: responseHeaders },
  warnings
  };
  }
  };
- var usageSchema = import_zod7.z.object({
- input_tokens: import_zod7.z.number(),
- input_tokens_details: import_zod7.z.object({ cached_tokens: import_zod7.z.number().nullish() }).nullish(),
- output_tokens: import_zod7.z.number(),
- output_tokens_details: import_zod7.z.object({ reasoning_tokens: import_zod7.z.number().nullish() }).nullish()
+ var usageSchema = import_zod8.z.object({
+ input_tokens: import_zod8.z.number(),
+ input_tokens_details: import_zod8.z.object({ cached_tokens: import_zod8.z.number().nullish() }).nullish(),
+ output_tokens: import_zod8.z.number(),
+ output_tokens_details: import_zod8.z.object({ reasoning_tokens: import_zod8.z.number().nullish() }).nullish()
  });
- var textDeltaChunkSchema = import_zod7.z.object({
- type: import_zod7.z.literal("response.output_text.delta"),
- delta: import_zod7.z.string()
+ var textDeltaChunkSchema = import_zod8.z.object({
+ type: import_zod8.z.literal("response.output_text.delta"),
+ delta: import_zod8.z.string()
  });
- var responseFinishedChunkSchema = import_zod7.z.object({
- type: import_zod7.z.enum(["response.completed", "response.incomplete"]),
- response: import_zod7.z.object({
- incomplete_details: import_zod7.z.object({ reason: import_zod7.z.string() }).nullish(),
+ var responseFinishedChunkSchema = import_zod8.z.object({
+ type: import_zod8.z.enum(["response.completed", "response.incomplete"]),
+ response: import_zod8.z.object({
+ incomplete_details: import_zod8.z.object({ reason: import_zod8.z.string() }).nullish(),
  usage: usageSchema
  })
  });
- var responseCreatedChunkSchema = import_zod7.z.object({
- type: import_zod7.z.literal("response.created"),
- response: import_zod7.z.object({
- id: import_zod7.z.string(),
- created_at: import_zod7.z.number(),
- model: import_zod7.z.string()
+ var responseCreatedChunkSchema = import_zod8.z.object({
+ type: import_zod8.z.literal("response.created"),
+ response: import_zod8.z.object({
+ id: import_zod8.z.string(),
+ created_at: import_zod8.z.number(),
+ model: import_zod8.z.string()
  })
  });
- var responseOutputItemDoneSchema = import_zod7.z.object({
- type: import_zod7.z.literal("response.output_item.done"),
- output_index: import_zod7.z.number(),
- item: import_zod7.z.discriminatedUnion("type", [
- import_zod7.z.object({
- type: import_zod7.z.literal("message")
+ var responseOutputItemDoneSchema = import_zod8.z.object({
+ type: import_zod8.z.literal("response.output_item.done"),
+ output_index: import_zod8.z.number(),
+ item: import_zod8.z.discriminatedUnion("type", [
+ import_zod8.z.object({
+ type: import_zod8.z.literal("message")
  }),
- import_zod7.z.object({
- type: import_zod7.z.literal("function_call"),
- id: import_zod7.z.string(),
- call_id: import_zod7.z.string(),
- name: import_zod7.z.string(),
- arguments: import_zod7.z.string(),
- status: import_zod7.z.literal("completed")
+ import_zod8.z.object({
+ type: import_zod8.z.literal("function_call"),
+ id: import_zod8.z.string(),
+ call_id: import_zod8.z.string(),
+ name: import_zod8.z.string(),
+ arguments: import_zod8.z.string(),
+ status: import_zod8.z.literal("completed")
  })
  ])
  });
- var responseFunctionCallArgumentsDeltaSchema = import_zod7.z.object({
- type: import_zod7.z.literal("response.function_call_arguments.delta"),
- item_id: import_zod7.z.string(),
- output_index: import_zod7.z.number(),
- delta: import_zod7.z.string()
+ var responseFunctionCallArgumentsDeltaSchema = import_zod8.z.object({
+ type: import_zod8.z.literal("response.function_call_arguments.delta"),
+ item_id: import_zod8.z.string(),
+ output_index: import_zod8.z.number(),
+ delta: import_zod8.z.string()
  });
- var responseOutputItemAddedSchema = import_zod7.z.object({
- type: import_zod7.z.literal("response.output_item.added"),
- output_index: import_zod7.z.number(),
- item: import_zod7.z.discriminatedUnion("type", [
- import_zod7.z.object({
- type: import_zod7.z.literal("message")
+ var responseOutputItemAddedSchema = import_zod8.z.object({
+ type: import_zod8.z.literal("response.output_item.added"),
+ output_index: import_zod8.z.number(),
+ item: import_zod8.z.discriminatedUnion("type", [
+ import_zod8.z.object({
+ type: import_zod8.z.literal("message")
  }),
- import_zod7.z.object({
- type: import_zod7.z.literal("function_call"),
- id: import_zod7.z.string(),
- call_id: import_zod7.z.string(),
- name: import_zod7.z.string(),
- arguments: import_zod7.z.string()
+ import_zod8.z.object({
+ type: import_zod8.z.literal("function_call"),
+ id: import_zod8.z.string(),
+ call_id: import_zod8.z.string(),
+ name: import_zod8.z.string(),
+ arguments: import_zod8.z.string()
  })
  ])
  });
- var responseAnnotationAddedSchema = import_zod7.z.object({
- type: import_zod7.z.literal("response.output_text.annotation.added"),
- annotation: import_zod7.z.object({
- type: import_zod7.z.literal("url_citation"),
- url: import_zod7.z.string(),
- title: import_zod7.z.string()
+ var responseAnnotationAddedSchema = import_zod8.z.object({
+ type: import_zod8.z.literal("response.output_text.annotation.added"),
+ annotation: import_zod8.z.object({
+ type: import_zod8.z.literal("url_citation"),
+ url: import_zod8.z.string(),
+ title: import_zod8.z.string()
  })
  });
- var openaiResponsesChunkSchema = import_zod7.z.union([
+ var openaiResponsesChunkSchema = import_zod8.z.union([
  textDeltaChunkSchema,
  responseFinishedChunkSchema,
  responseCreatedChunkSchema,
@@ -2220,7 +2398,7 @@ var openaiResponsesChunkSchema = import_zod7.z.union([
  responseFunctionCallArgumentsDeltaSchema,
  responseOutputItemAddedSchema,
  responseAnnotationAddedSchema,
- import_zod7.z.object({ type: import_zod7.z.string() }).passthrough()
+ import_zod8.z.object({ type: import_zod8.z.string() }).passthrough()
  // fallback for unknown chunks
  ]);
  function isTextDeltaChunk(chunk) {
@@ -2265,25 +2443,25 @@ function getResponsesModelConfig(modelId) {
  requiredAutoTruncation: false
  };
  }
- var openaiResponsesProviderOptionsSchema = import_zod7.z.object({
- metadata: import_zod7.z.any().nullish(),
- parallelToolCalls: import_zod7.z.boolean().nullish(),
- previousResponseId: import_zod7.z.string().nullish(),
- store: import_zod7.z.boolean().nullish(),
- user: import_zod7.z.string().nullish(),
- reasoningEffort: import_zod7.z.string().nullish(),
- strictSchemas: import_zod7.z.boolean().nullish(),
- instructions: import_zod7.z.string().nullish()
+ var openaiResponsesProviderOptionsSchema = import_zod8.z.object({
+ metadata: import_zod8.z.any().nullish(),
+ parallelToolCalls: import_zod8.z.boolean().nullish(),
+ previousResponseId: import_zod8.z.string().nullish(),
+ store: import_zod8.z.boolean().nullish(),
+ user: import_zod8.z.string().nullish(),
+ reasoningEffort: import_zod8.z.string().nullish(),
+ strictSchemas: import_zod8.z.boolean().nullish(),
+ instructions: import_zod8.z.string().nullish()
  });

  // src/openai-provider.ts
  function createOpenAI(options = {}) {
  var _a, _b, _c;
- const baseURL = (_a = (0, import_provider_utils9.withoutTrailingSlash)(options.baseURL)) != null ? _a : "https://api.openai.com/v1";
+ const baseURL = (_a = (0, import_provider_utils8.withoutTrailingSlash)(options.baseURL)) != null ? _a : "https://api.openai.com/v1";
  const compatibility = (_b = options.compatibility) != null ? _b : "compatible";
  const providerName = (_c = options.name) != null ? _c : "openai";
  const getHeaders = () => ({
- Authorization: `Bearer ${(0, import_provider_utils9.loadApiKey)({
+ Authorization: `Bearer ${(0, import_provider_utils8.loadApiKey)({
  apiKey: options.apiKey,
  environmentVariableName: "OPENAI_API_KEY",
  description: "OpenAI"
@@ -2318,6 +2496,12 @@ function createOpenAI(options = {}) {
  headers: getHeaders,
  fetch: options.fetch
  });
+ const createTranscriptionModel = (modelId) => new OpenAITranscriptionModel(modelId, {
+ provider: `${providerName}.transcription`,
+ url: ({ path }) => `${baseURL}${path}`,
+ headers: getHeaders,
+ fetch: options.fetch
+ });
  const createLanguageModel = (modelId, settings) => {
  if (new.target) {
  throw new Error(
@@ -2352,6 +2536,8 @@ function createOpenAI(options = {}) {
  provider.textEmbeddingModel = createEmbeddingModel;
  provider.image = createImageModel;
  provider.imageModel = createImageModel;
+ provider.transcription = createTranscriptionModel;
+ provider.transcriptionModel = createTranscriptionModel;
  provider.tools = openaiTools;
  return provider;
  }
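
Usage note (illustrative, not part of the published diff): the canary adds transcription support, wired up above as OpenAITranscriptionModel, createTranscriptionModel, and the provider.transcription / provider.transcriptionModel factories. The TypeScript sketch below shows one way this could be exercised, assuming the factories behave as the diff suggests; "whisper-1" and the audio file name are placeholder values, and calling doGenerate() directly stands in for the AI SDK core, which normally drives the model for you.

import { createOpenAI } from '@ai-sdk/openai';
import { readFile } from 'node:fs/promises';

// Reads OPENAI_API_KEY from the environment (see loadApiKey in the diff).
const openai = createOpenAI();

// provider.transcription(modelId) constructs an OpenAITranscriptionModel.
const model = openai.transcription('whisper-1');

// getArgs() accepts the audio as a Uint8Array or a base64 string.
const audio = new Uint8Array(await readFile('speech.wav'));

const result = await model.doGenerate({
  audio,
  mediaType: 'audio/wav',
  providerOptions: {
    openai: {
      // Keys from OpenAIProviderOptionsSchema in the diff.
      language: 'en',
      temperature: 0,
      timestampGranularities: ['word'],
    },
  },
});

// doGenerate() returns text, segments, language, durationInSeconds, warnings, and response metadata.
console.log(result.text, result.durationInSeconds);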