@ai-sdk/openai 2.0.0-canary.2 → 2.0.0-canary.4

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -25,18 +25,18 @@ __export(internal_exports, {
  OpenAIEmbeddingModel: () => OpenAIEmbeddingModel,
  OpenAIImageModel: () => OpenAIImageModel,
  OpenAIResponsesLanguageModel: () => OpenAIResponsesLanguageModel,
+ OpenAITranscriptionModel: () => OpenAITranscriptionModel,
  modelMaxImagesPerCall: () => modelMaxImagesPerCall
  });
  module.exports = __toCommonJS(internal_exports);

  // src/openai-chat-language-model.ts
  var import_provider3 = require("@ai-sdk/provider");
- var import_provider_utils3 = require("@ai-sdk/provider-utils");
+ var import_provider_utils2 = require("@ai-sdk/provider-utils");
  var import_zod2 = require("zod");

  // src/convert-to-openai-chat-messages.ts
  var import_provider = require("@ai-sdk/provider");
- var import_provider_utils = require("@ai-sdk/provider-utils");
  function convertToOpenAIChatMessages({
  prompt,
  useLegacyFunctionCalling = false,
@@ -80,55 +80,65 @@ function convertToOpenAIChatMessages({
  messages.push({
  role: "user",
  content: content.map((part, index) => {
- var _a, _b, _c, _d;
+ var _a, _b, _c;
  switch (part.type) {
  case "text": {
  return { type: "text", text: part.text };
  }
- case "image": {
- return {
- type: "image_url",
- image_url: {
- url: part.image instanceof URL ? part.image.toString() : `data:${(_a = part.mimeType) != null ? _a : "image/jpeg"};base64,${(0, import_provider_utils.convertUint8ArrayToBase64)(part.image)}`,
- // OpenAI specific extension: image detail
- detail: (_c = (_b = part.providerOptions) == null ? void 0 : _b.openai) == null ? void 0 : _c.imageDetail
- }
- };
- }
  case "file": {
- if (part.data instanceof URL) {
- throw new import_provider.UnsupportedFunctionalityError({
- functionality: "'File content parts with URL data' functionality not supported."
- });
- }
- switch (part.mimeType) {
- case "audio/wav": {
- return {
- type: "input_audio",
- input_audio: { data: part.data, format: "wav" }
- };
- }
- case "audio/mp3":
- case "audio/mpeg": {
- return {
- type: "input_audio",
- input_audio: { data: part.data, format: "mp3" }
- };
+ if (part.mediaType.startsWith("image/")) {
+ const mediaType = part.mediaType === "image/*" ? "image/jpeg" : part.mediaType;
+ return {
+ type: "image_url",
+ image_url: {
+ url: part.data instanceof URL ? part.data.toString() : `data:${mediaType};base64,${part.data}`,
+ // OpenAI specific extension: image detail
+ detail: (_b = (_a = part.providerOptions) == null ? void 0 : _a.openai) == null ? void 0 : _b.imageDetail
+ }
+ };
+ } else if (part.mediaType.startsWith("audio/")) {
+ if (part.data instanceof URL) {
+ throw new import_provider.UnsupportedFunctionalityError({
+ functionality: "audio file parts with URLs"
+ });
  }
- case "application/pdf": {
- return {
- type: "file",
- file: {
- filename: (_d = part.filename) != null ? _d : `part-${index}.pdf`,
- file_data: `data:application/pdf;base64,${part.data}`
- }
- };
+ switch (part.mediaType) {
+ case "audio/wav": {
+ return {
+ type: "input_audio",
+ input_audio: { data: part.data, format: "wav" }
+ };
+ }
+ case "audio/mp3":
+ case "audio/mpeg": {
+ return {
+ type: "input_audio",
+ input_audio: { data: part.data, format: "mp3" }
+ };
+ }
+ default: {
+ throw new import_provider.UnsupportedFunctionalityError({
+ functionality: `audio content parts with media type ${part.mediaType}`
+ });
+ }
  }
- default: {
+ } else if (part.mediaType === "application/pdf") {
+ if (part.data instanceof URL) {
  throw new import_provider.UnsupportedFunctionalityError({
- functionality: `File content part type ${part.mimeType} in user messages`
+ functionality: "PDF file parts with URLs"
  });
  }
+ return {
+ type: "file",
+ file: {
+ filename: (_c = part.filename) != null ? _c : `part-${index}.pdf`,
+ file_data: `data:application/pdf;base64,${part.data}`
+ }
+ };
+ } else {
+ throw new import_provider.UnsupportedFunctionalityError({
+ functionality: `file part media type ${part.mediaType}`
+ });
  }
  }
  }
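Editor's note: as of canary.4 the chat prompt converter no longer handles a dedicated `image` part; images, audio, and PDFs all arrive as `file` parts carrying a `mediaType` plus either base64 `data` or a URL. A minimal sketch of a provider-level prompt this converter would now accept; the field names come from the code above, while the surrounding values and the idea of building the prompt by hand are illustrative assumptions:

// Hypothetical LanguageModelV2-style user message using the unified "file" parts.
const prompt = [
  {
    role: 'user' as const,
    content: [
      { type: 'text' as const, text: 'Describe the attached image and audio clip.' },
      {
        type: 'file' as const,
        mediaType: 'image/png',
        data: 'iVBORw0KGgo...', // base64 image -> mapped to an image_url part
        providerOptions: { openai: { imageDetail: 'low' } },
      },
      {
        type: 'file' as const,
        mediaType: 'audio/wav',
        data: 'UklGRiQ...', // base64 audio -> mapped to an input_audio part
      },
    ],
  },
];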
@@ -237,7 +247,7 @@ function mapOpenAIFinishReason(finishReason) {

  // src/openai-error.ts
  var import_zod = require("zod");
- var import_provider_utils2 = require("@ai-sdk/provider-utils");
+ var import_provider_utils = require("@ai-sdk/provider-utils");
  var openaiErrorDataSchema = import_zod.z.object({
  error: import_zod.z.object({
  message: import_zod.z.string(),
@@ -249,7 +259,7 @@ var openaiErrorDataSchema = import_zod.z.object({
  code: import_zod.z.union([import_zod.z.string(), import_zod.z.number()]).nullish()
  })
  });
- var openaiFailedResponseHandler = (0, import_provider_utils2.createJsonErrorResponseHandler)({
+ var openaiFailedResponseHandler = (0, import_provider_utils.createJsonErrorResponseHandler)({
  errorSchema: openaiErrorDataSchema,
  errorToMessage: (data) => data.error.message
  });
@@ -361,7 +371,7 @@ function prepareTools({
  default: {
  const _exhaustiveCheck = type;
  throw new import_provider2.UnsupportedFunctionalityError({
- functionality: `Unsupported tool choice type: ${_exhaustiveCheck}`
+ functionality: `tool choice type: ${_exhaustiveCheck}`
  });
  }
  }
@@ -568,15 +578,15 @@ var OpenAIChatLanguageModel = class {
  responseHeaders,
  value: response,
  rawValue: rawResponse
- } = await (0, import_provider_utils3.postJsonToApi)({
+ } = await (0, import_provider_utils2.postJsonToApi)({
  url: this.config.url({
  path: "/chat/completions",
  modelId: this.modelId
  }),
- headers: (0, import_provider_utils3.combineHeaders)(this.config.headers(), options.headers),
+ headers: (0, import_provider_utils2.combineHeaders)(this.config.headers(), options.headers),
  body,
  failedResponseHandler: openaiFailedResponseHandler,
- successfulResponseHandler: (0, import_provider_utils3.createJsonResponseHandler)(
+ successfulResponseHandler: (0, import_provider_utils2.createJsonResponseHandler)(
  openaiChatResponseSchema
  ),
  abortSignal: options.abortSignal,
@@ -604,7 +614,7 @@ var OpenAIChatLanguageModel = class {
  toolCalls: this.settings.useLegacyFunctionCalling && choice.message.function_call ? [
  {
  toolCallType: "function",
- toolCallId: (0, import_provider_utils3.generateId)(),
+ toolCallId: (0, import_provider_utils2.generateId)(),
  toolName: choice.message.function_call.name,
  args: choice.message.function_call.arguments
  }
@@ -612,7 +622,7 @@ var OpenAIChatLanguageModel = class {
  var _a2;
  return {
  toolCallType: "function",
- toolCallId: (_a2 = toolCall.id) != null ? _a2 : (0, import_provider_utils3.generateId)(),
+ toolCallId: (_a2 = toolCall.id) != null ? _a2 : (0, import_provider_utils2.generateId)(),
  toolName: toolCall.function.name,
  args: toolCall.function.arguments
  };
@@ -623,9 +633,12 @@ var OpenAIChatLanguageModel = class {
  completionTokens: (_h = (_g = response.usage) == null ? void 0 : _g.completion_tokens) != null ? _h : NaN
  },
  rawCall: { rawPrompt, rawSettings },
- rawResponse: { headers: responseHeaders, body: rawResponse },
  request: { body: JSON.stringify(body) },
- response: getResponseMetadata(response),
+ response: {
+ ...getResponseMetadata(response),
+ headers: responseHeaders,
+ body: rawResponse
+ },
  warnings,
  logprobs: mapOpenAIChatLogProbsOutput(choice.logprobs),
  providerMetadata
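Editor's note: canary.4 folds the transport-level details that used to live on `rawResponse` into the `response` metadata object returned by `doGenerate` and `doStream`. A rough sketch of how a caller of the provider interface would read them now; the `model` and `options` values are assumptions, only the field names come from this diff:

// Hypothetical direct call against the language model interface in this bundle.
const result = await model.doGenerate(options);

// Before (canary.2): headers and raw body lived on a separate rawResponse object.
// result.rawResponse.headers, result.rawResponse.body

// After (canary.4): they are merged into the response metadata alongside the
// fields from getResponseMetadata (id, timestamp, modelId).
console.log(result.response.headers);
console.log(result.response.body);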
@@ -671,7 +684,7 @@ var OpenAIChatLanguageModel = class {
  return {
  stream: simulatedStream,
  rawCall: result.rawCall,
- rawResponse: result.rawResponse,
+ response: result.response,
  warnings: result.warnings
  };
  }
@@ -682,15 +695,15 @@ var OpenAIChatLanguageModel = class {
  // only include stream_options when in strict compatibility mode:
  stream_options: this.config.compatibility === "strict" ? { include_usage: true } : void 0
  };
- const { responseHeaders, value: response } = await (0, import_provider_utils3.postJsonToApi)({
+ const { responseHeaders, value: response } = await (0, import_provider_utils2.postJsonToApi)({
  url: this.config.url({
  path: "/chat/completions",
  modelId: this.modelId
  }),
- headers: (0, import_provider_utils3.combineHeaders)(this.config.headers(), options.headers),
+ headers: (0, import_provider_utils2.combineHeaders)(this.config.headers(), options.headers),
  body,
  failedResponseHandler: openaiFailedResponseHandler,
- successfulResponseHandler: (0, import_provider_utils3.createEventSourceResponseHandler)(
+ successfulResponseHandler: (0, import_provider_utils2.createEventSourceResponseHandler)(
  openaiChatChunkSchema
  ),
  abortSignal: options.abortSignal,
@@ -778,7 +791,7 @@ var OpenAIChatLanguageModel = class {
  const mappedToolCalls = useLegacyFunctionCalling && delta.function_call != null ? [
  {
  type: "function",
- id: (0, import_provider_utils3.generateId)(),
+ id: (0, import_provider_utils2.generateId)(),
  function: delta.function_call,
  index: 0
  }
@@ -825,11 +838,11 @@ var OpenAIChatLanguageModel = class {
  argsTextDelta: toolCall2.function.arguments
  });
  }
- if ((0, import_provider_utils3.isParsableJson)(toolCall2.function.arguments)) {
+ if ((0, import_provider_utils2.isParsableJson)(toolCall2.function.arguments)) {
  controller.enqueue({
  type: "tool-call",
  toolCallType: "function",
- toolCallId: (_e = toolCall2.id) != null ? _e : (0, import_provider_utils3.generateId)(),
+ toolCallId: (_e = toolCall2.id) != null ? _e : (0, import_provider_utils2.generateId)(),
  toolName: toolCall2.function.name,
  args: toolCall2.function.arguments
  });
@@ -852,11 +865,11 @@ var OpenAIChatLanguageModel = class {
  toolName: toolCall.function.name,
  argsTextDelta: (_i = toolCallDelta.function.arguments) != null ? _i : ""
  });
- if (((_j = toolCall.function) == null ? void 0 : _j.name) != null && ((_k = toolCall.function) == null ? void 0 : _k.arguments) != null && (0, import_provider_utils3.isParsableJson)(toolCall.function.arguments)) {
+ if (((_j = toolCall.function) == null ? void 0 : _j.name) != null && ((_k = toolCall.function) == null ? void 0 : _k.arguments) != null && (0, import_provider_utils2.isParsableJson)(toolCall.function.arguments)) {
  controller.enqueue({
  type: "tool-call",
  toolCallType: "function",
- toolCallId: (_l = toolCall.id) != null ? _l : (0, import_provider_utils3.generateId)(),
+ toolCallId: (_l = toolCall.id) != null ? _l : (0, import_provider_utils2.generateId)(),
  toolName: toolCall.function.name,
  args: toolCall.function.arguments
  });
@@ -881,7 +894,7 @@ var OpenAIChatLanguageModel = class {
  })
  ),
  rawCall: { rawPrompt, rawSettings },
- rawResponse: { headers: responseHeaders },
+ response: { headers: responseHeaders },
  request: { body: JSON.stringify(body) },
  warnings
  };
@@ -1026,7 +1039,7 @@ var reasoningModels = {
  };

  // src/openai-completion-language-model.ts
- var import_provider_utils4 = require("@ai-sdk/provider-utils");
+ var import_provider_utils3 = require("@ai-sdk/provider-utils");
  var import_zod3 = require("zod");

  // src/convert-to-openai-completion-prompt.ts
@@ -1061,13 +1074,8 @@ function convertToOpenAICompletionPrompt({
  case "text": {
  return part.text;
  }
- case "image": {
- throw new import_provider4.UnsupportedFunctionalityError({
- functionality: "images"
- });
- }
  }
- }).join("");
+ }).filter(Boolean).join("");
  text += `${user}:
${userMessage}

@@ -1204,15 +1212,15 @@ var OpenAICompletionLanguageModel = class {
  responseHeaders,
  value: response,
  rawValue: rawResponse
- } = await (0, import_provider_utils4.postJsonToApi)({
+ } = await (0, import_provider_utils3.postJsonToApi)({
  url: this.config.url({
  path: "/completions",
  modelId: this.modelId
  }),
- headers: (0, import_provider_utils4.combineHeaders)(this.config.headers(), options.headers),
+ headers: (0, import_provider_utils3.combineHeaders)(this.config.headers(), options.headers),
  body: args,
  failedResponseHandler: openaiFailedResponseHandler,
- successfulResponseHandler: (0, import_provider_utils4.createJsonResponseHandler)(
+ successfulResponseHandler: (0, import_provider_utils3.createJsonResponseHandler)(
  openaiCompletionResponseSchema
  ),
  abortSignal: options.abortSignal,
@@ -1229,10 +1237,13 @@ var OpenAICompletionLanguageModel = class {
  finishReason: mapOpenAIFinishReason(choice.finish_reason),
  logprobs: mapOpenAICompletionLogProbs(choice.logprobs),
  rawCall: { rawPrompt, rawSettings },
- rawResponse: { headers: responseHeaders, body: rawResponse },
- response: getResponseMetadata(response),
- warnings,
- request: { body: JSON.stringify(args) }
+ request: { body: JSON.stringify(args) },
+ response: {
+ ...getResponseMetadata(response),
+ headers: responseHeaders,
+ body: rawResponse
+ },
+ warnings
  };
  }
  async doStream(options) {
@@ -1243,15 +1254,15 @@ var OpenAICompletionLanguageModel = class {
  // only include stream_options when in strict compatibility mode:
  stream_options: this.config.compatibility === "strict" ? { include_usage: true } : void 0
  };
- const { responseHeaders, value: response } = await (0, import_provider_utils4.postJsonToApi)({
+ const { responseHeaders, value: response } = await (0, import_provider_utils3.postJsonToApi)({
  url: this.config.url({
  path: "/completions",
  modelId: this.modelId
  }),
- headers: (0, import_provider_utils4.combineHeaders)(this.config.headers(), options.headers),
+ headers: (0, import_provider_utils3.combineHeaders)(this.config.headers(), options.headers),
  body,
  failedResponseHandler: openaiFailedResponseHandler,
- successfulResponseHandler: (0, import_provider_utils4.createEventSourceResponseHandler)(
+ successfulResponseHandler: (0, import_provider_utils3.createEventSourceResponseHandler)(
  openaiCompletionChunkSchema
  ),
  abortSignal: options.abortSignal,
@@ -1322,7 +1333,7 @@ var OpenAICompletionLanguageModel = class {
  })
  ),
  rawCall: { rawPrompt, rawSettings },
- rawResponse: { headers: responseHeaders },
+ response: { headers: responseHeaders },
  warnings,
  request: { body: JSON.stringify(body) }
  };
@@ -1375,7 +1386,7 @@ var openaiCompletionChunkSchema = import_zod3.z.union([

  // src/openai-embedding-model.ts
  var import_provider5 = require("@ai-sdk/provider");
- var import_provider_utils5 = require("@ai-sdk/provider-utils");
+ var import_provider_utils4 = require("@ai-sdk/provider-utils");
  var import_zod4 = require("zod");
  var OpenAIEmbeddingModel = class {
  constructor(modelId, settings, config) {
@@ -1408,12 +1419,12 @@ var OpenAIEmbeddingModel = class {
  values
  });
  }
- const { responseHeaders, value: response } = await (0, import_provider_utils5.postJsonToApi)({
+ const { responseHeaders, value: response } = await (0, import_provider_utils4.postJsonToApi)({
  url: this.config.url({
  path: "/embeddings",
  modelId: this.modelId
  }),
- headers: (0, import_provider_utils5.combineHeaders)(this.config.headers(), headers),
+ headers: (0, import_provider_utils4.combineHeaders)(this.config.headers(), headers),
  body: {
  model: this.modelId,
  input: values,
@@ -1422,7 +1433,7 @@ var OpenAIEmbeddingModel = class {
  user: this.settings.user
  },
  failedResponseHandler: openaiFailedResponseHandler,
- successfulResponseHandler: (0, import_provider_utils5.createJsonResponseHandler)(
+ successfulResponseHandler: (0, import_provider_utils4.createJsonResponseHandler)(
  openaiTextEmbeddingResponseSchema
  ),
  abortSignal,
@@ -1441,7 +1452,7 @@ var openaiTextEmbeddingResponseSchema = import_zod4.z.object({
  });

  // src/openai-image-model.ts
- var import_provider_utils6 = require("@ai-sdk/provider-utils");
+ var import_provider_utils5 = require("@ai-sdk/provider-utils");
  var import_zod5 = require("zod");

  // src/openai-image-settings.ts
@@ -1488,12 +1499,12 @@ var OpenAIImageModel = class {
  warnings.push({ type: "unsupported-setting", setting: "seed" });
  }
  const currentDate = (_c = (_b = (_a = this.config._internal) == null ? void 0 : _a.currentDate) == null ? void 0 : _b.call(_a)) != null ? _c : /* @__PURE__ */ new Date();
- const { value: response, responseHeaders } = await (0, import_provider_utils6.postJsonToApi)({
+ const { value: response, responseHeaders } = await (0, import_provider_utils5.postJsonToApi)({
  url: this.config.url({
  path: "/images/generations",
  modelId: this.modelId
  }),
- headers: (0, import_provider_utils6.combineHeaders)(this.config.headers(), headers),
+ headers: (0, import_provider_utils5.combineHeaders)(this.config.headers(), headers),
  body: {
  model: this.modelId,
  prompt,
@@ -1503,7 +1514,7 @@ var OpenAIImageModel = class {
  response_format: "b64_json"
  },
  failedResponseHandler: openaiFailedResponseHandler,
- successfulResponseHandler: (0, import_provider_utils6.createJsonResponseHandler)(
+ successfulResponseHandler: (0, import_provider_utils5.createJsonResponseHandler)(
  openaiImageResponseSchema
  ),
  abortSignal,
@@ -1524,13 +1535,186 @@ var openaiImageResponseSchema = import_zod5.z.object({
  data: import_zod5.z.array(import_zod5.z.object({ b64_json: import_zod5.z.string() }))
  });

- // src/responses/openai-responses-language-model.ts
- var import_provider_utils8 = require("@ai-sdk/provider-utils");
+ // src/openai-transcription-model.ts
+ var import_provider_utils6 = require("@ai-sdk/provider-utils");
  var import_zod6 = require("zod");
+ var OpenAIProviderOptionsSchema = import_zod6.z.object({
+ include: import_zod6.z.array(import_zod6.z.string()).optional().describe(
+ "Additional information to include in the transcription response."
+ ),
+ language: import_zod6.z.string().optional().describe("The language of the input audio in ISO-639-1 format."),
+ prompt: import_zod6.z.string().optional().describe(
+ "An optional text to guide the model's style or continue a previous audio segment."
+ ),
+ temperature: import_zod6.z.number().min(0).max(1).optional().default(0).describe("The sampling temperature, between 0 and 1."),
+ timestampGranularities: import_zod6.z.array(import_zod6.z.enum(["word", "segment"])).optional().default(["segment"]).describe(
+ "The timestamp granularities to populate for this transcription."
+ )
+ });
+ var languageMap = {
+ afrikaans: "af",
+ arabic: "ar",
+ armenian: "hy",
+ azerbaijani: "az",
+ belarusian: "be",
+ bosnian: "bs",
+ bulgarian: "bg",
+ catalan: "ca",
+ chinese: "zh",
+ croatian: "hr",
+ czech: "cs",
+ danish: "da",
+ dutch: "nl",
+ english: "en",
+ estonian: "et",
+ finnish: "fi",
+ french: "fr",
+ galician: "gl",
+ german: "de",
+ greek: "el",
+ hebrew: "he",
+ hindi: "hi",
+ hungarian: "hu",
+ icelandic: "is",
+ indonesian: "id",
+ italian: "it",
+ japanese: "ja",
+ kannada: "kn",
+ kazakh: "kk",
+ korean: "ko",
+ latvian: "lv",
+ lithuanian: "lt",
+ macedonian: "mk",
+ malay: "ms",
+ marathi: "mr",
+ maori: "mi",
+ nepali: "ne",
+ norwegian: "no",
+ persian: "fa",
+ polish: "pl",
+ portuguese: "pt",
+ romanian: "ro",
+ russian: "ru",
+ serbian: "sr",
+ slovak: "sk",
+ slovenian: "sl",
+ spanish: "es",
+ swahili: "sw",
+ swedish: "sv",
+ tagalog: "tl",
+ tamil: "ta",
+ thai: "th",
+ turkish: "tr",
+ ukrainian: "uk",
+ urdu: "ur",
+ vietnamese: "vi",
+ welsh: "cy"
+ };
+ var OpenAITranscriptionModel = class {
+ constructor(modelId, config) {
+ this.modelId = modelId;
+ this.config = config;
+ this.specificationVersion = "v1";
+ }
+ get provider() {
+ return this.config.provider;
+ }
+ getArgs({
+ audio,
+ mediaType,
+ providerOptions
+ }) {
+ const warnings = [];
+ const openAIOptions = (0, import_provider_utils6.parseProviderOptions)({
+ provider: "openai",
+ providerOptions,
+ schema: OpenAIProviderOptionsSchema
+ });
+ const formData = new FormData();
+ const blob = audio instanceof Uint8Array ? new Blob([audio]) : new Blob([(0, import_provider_utils6.convertBase64ToUint8Array)(audio)]);
+ formData.append("model", this.modelId);
+ formData.append("file", new File([blob], "audio", { type: mediaType }));
+ if (openAIOptions) {
+ const transcriptionModelOptions = {
+ include: openAIOptions.include,
+ language: openAIOptions.language,
+ prompt: openAIOptions.prompt,
+ temperature: openAIOptions.temperature,
+ timestamp_granularities: openAIOptions.timestampGranularities
+ };
+ for (const key in transcriptionModelOptions) {
+ const value = transcriptionModelOptions[key];
+ if (value !== void 0) {
+ formData.append(key, value);
+ }
+ }
+ }
+ return {
+ formData,
+ warnings
+ };
+ }
+ async doGenerate(options) {
+ var _a, _b, _c, _d, _e, _f;
+ const currentDate = (_c = (_b = (_a = this.config._internal) == null ? void 0 : _a.currentDate) == null ? void 0 : _b.call(_a)) != null ? _c : /* @__PURE__ */ new Date();
+ const { formData, warnings } = this.getArgs(options);
+ const {
+ value: response,
+ responseHeaders,
+ rawValue: rawResponse
+ } = await (0, import_provider_utils6.postFormDataToApi)({
+ url: this.config.url({
+ path: "/audio/transcriptions",
+ modelId: this.modelId
+ }),
+ headers: (0, import_provider_utils6.combineHeaders)(this.config.headers(), options.headers),
+ formData,
+ failedResponseHandler: openaiFailedResponseHandler,
+ successfulResponseHandler: (0, import_provider_utils6.createJsonResponseHandler)(
+ openaiTranscriptionResponseSchema
+ ),
+ abortSignal: options.abortSignal,
+ fetch: this.config.fetch
+ });
+ const language = response.language != null && response.language in languageMap ? languageMap[response.language] : void 0;
+ return {
+ text: response.text,
+ segments: (_e = (_d = response.words) == null ? void 0 : _d.map((word) => ({
+ text: word.word,
+ startSecond: word.start,
+ endSecond: word.end
+ }))) != null ? _e : [],
+ language,
+ durationInSeconds: (_f = response.duration) != null ? _f : void 0,
+ warnings,
+ response: {
+ timestamp: currentDate,
+ modelId: this.modelId,
+ headers: responseHeaders,
+ body: rawResponse
+ }
+ };
+ }
+ };
+ var openaiTranscriptionResponseSchema = import_zod6.z.object({
+ text: import_zod6.z.string(),
+ language: import_zod6.z.string().nullish(),
+ duration: import_zod6.z.number().nullish(),
+ words: import_zod6.z.array(
+ import_zod6.z.object({
+ word: import_zod6.z.string(),
+ start: import_zod6.z.number(),
+ end: import_zod6.z.number()
+ })
+ ).nullish()
+ });
+
+ // src/responses/openai-responses-language-model.ts
+ var import_provider_utils7 = require("@ai-sdk/provider-utils");
+ var import_zod7 = require("zod");

  // src/responses/convert-to-openai-responses-messages.ts
  var import_provider6 = require("@ai-sdk/provider");
- var import_provider_utils7 = require("@ai-sdk/provider-utils");
  function convertToOpenAIResponsesMessages({
  prompt,
  systemMessageMode
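Editor's note: the new `OpenAITranscriptionModel` posts multipart form data to `/audio/transcriptions` and forwards the provider options defined in `OpenAIProviderOptionsSchema` above (include, language, prompt, temperature, timestampGranularities). A hedged usage sketch, assuming the `experimental_transcribe` helper from the `ai` package and an `openai.transcription()` factory on this provider; neither appears in this diff, and the model id and file path are placeholders:

import { experimental_transcribe as transcribe } from 'ai';
import { openai } from '@ai-sdk/openai';
import { readFile } from 'node:fs/promises';

// Transcribe a local audio file; providerOptions.openai mirrors the schema
// introduced in this version of the bundle.
const result = await transcribe({
  model: openai.transcription('whisper-1'),
  audio: await readFile('./meeting.wav'),
  providerOptions: {
    openai: {
      language: 'en',
      temperature: 0,
      timestampGranularities: ['word'],
    },
  },
});

console.log(result.text);
console.log(result.segments); // word-level segments mapped from response.words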
@@ -1569,38 +1753,35 @@ function convertToOpenAIResponsesMessages({
  messages.push({
  role: "user",
  content: content.map((part, index) => {
- var _a, _b, _c, _d;
+ var _a, _b, _c;
  switch (part.type) {
  case "text": {
  return { type: "input_text", text: part.text };
  }
- case "image": {
- return {
- type: "input_image",
- image_url: part.image instanceof URL ? part.image.toString() : `data:${(_a = part.mimeType) != null ? _a : "image/jpeg"};base64,${(0, import_provider_utils7.convertUint8ArrayToBase64)(part.image)}`,
- // OpenAI specific extension: image detail
- detail: (_c = (_b = part.providerOptions) == null ? void 0 : _b.openai) == null ? void 0 : _c.imageDetail
- };
- }
  case "file": {
- if (part.data instanceof URL) {
- throw new import_provider6.UnsupportedFunctionalityError({
- functionality: "File URLs in user messages"
- });
- }
- switch (part.mimeType) {
- case "application/pdf": {
- return {
- type: "input_file",
- filename: (_d = part.filename) != null ? _d : `part-${index}.pdf`,
- file_data: `data:application/pdf;base64,${part.data}`
- };
- }
- default: {
+ if (part.mediaType.startsWith("image/")) {
+ const mediaType = part.mediaType === "image/*" ? "image/jpeg" : part.mediaType;
+ return {
+ type: "input_image",
+ image_url: part.data instanceof URL ? part.data.toString() : `data:${mediaType};base64,${part.data}`,
+ // OpenAI specific extension: image detail
+ detail: (_b = (_a = part.providerOptions) == null ? void 0 : _a.openai) == null ? void 0 : _b.imageDetail
+ };
+ } else if (part.mediaType === "application/pdf") {
+ if (part.data instanceof URL) {
  throw new import_provider6.UnsupportedFunctionalityError({
- functionality: "Only PDF files are supported in user messages"
+ functionality: "PDF file parts with URLs"
  });
  }
+ return {
+ type: "input_file",
+ filename: (_c = part.filename) != null ? _c : `part-${index}.pdf`,
+ file_data: `data:application/pdf;base64,${part.data}`
+ };
+ } else {
+ throw new import_provider6.UnsupportedFunctionalityError({
+ functionality: `file part media type ${part.mediaType}`
+ });
  }
  }
  }
@@ -1729,7 +1910,7 @@ function prepareResponsesTools({
  default: {
  const _exhaustiveCheck = type;
  throw new import_provider7.UnsupportedFunctionalityError({
- functionality: `Unsupported tool choice type: ${_exhaustiveCheck}`
+ functionality: `tool choice type: ${_exhaustiveCheck}`
  });
  }
  }
@@ -1790,7 +1971,7 @@ var OpenAIResponsesLanguageModel = class {
  systemMessageMode: modelConfig.systemMessageMode
  });
  warnings.push(...messageWarnings);
- const openaiOptions = (0, import_provider_utils8.parseProviderOptions)({
+ const openaiOptions = (0, import_provider_utils7.parseProviderOptions)({
  provider: "openai",
  providerOptions,
  schema: openaiResponsesProviderOptionsSchema
@@ -1871,58 +2052,58 @@ var OpenAIResponsesLanguageModel = class {
  responseHeaders,
  value: response,
  rawValue: rawResponse
- } = await (0, import_provider_utils8.postJsonToApi)({
+ } = await (0, import_provider_utils7.postJsonToApi)({
  url: this.config.url({
  path: "/responses",
  modelId: this.modelId
  }),
- headers: (0, import_provider_utils8.combineHeaders)(this.config.headers(), options.headers),
+ headers: (0, import_provider_utils7.combineHeaders)(this.config.headers(), options.headers),
  body,
  failedResponseHandler: openaiFailedResponseHandler,
- successfulResponseHandler: (0, import_provider_utils8.createJsonResponseHandler)(
- import_zod6.z.object({
- id: import_zod6.z.string(),
- created_at: import_zod6.z.number(),
- model: import_zod6.z.string(),
- output: import_zod6.z.array(
- import_zod6.z.discriminatedUnion("type", [
- import_zod6.z.object({
- type: import_zod6.z.literal("message"),
- role: import_zod6.z.literal("assistant"),
- content: import_zod6.z.array(
- import_zod6.z.object({
- type: import_zod6.z.literal("output_text"),
- text: import_zod6.z.string(),
- annotations: import_zod6.z.array(
- import_zod6.z.object({
- type: import_zod6.z.literal("url_citation"),
- start_index: import_zod6.z.number(),
- end_index: import_zod6.z.number(),
- url: import_zod6.z.string(),
- title: import_zod6.z.string()
+ successfulResponseHandler: (0, import_provider_utils7.createJsonResponseHandler)(
+ import_zod7.z.object({
+ id: import_zod7.z.string(),
+ created_at: import_zod7.z.number(),
+ model: import_zod7.z.string(),
+ output: import_zod7.z.array(
+ import_zod7.z.discriminatedUnion("type", [
+ import_zod7.z.object({
+ type: import_zod7.z.literal("message"),
+ role: import_zod7.z.literal("assistant"),
+ content: import_zod7.z.array(
+ import_zod7.z.object({
+ type: import_zod7.z.literal("output_text"),
+ text: import_zod7.z.string(),
+ annotations: import_zod7.z.array(
+ import_zod7.z.object({
+ type: import_zod7.z.literal("url_citation"),
+ start_index: import_zod7.z.number(),
+ end_index: import_zod7.z.number(),
+ url: import_zod7.z.string(),
+ title: import_zod7.z.string()
  })
  )
  })
  )
  }),
- import_zod6.z.object({
- type: import_zod6.z.literal("function_call"),
- call_id: import_zod6.z.string(),
- name: import_zod6.z.string(),
- arguments: import_zod6.z.string()
+ import_zod7.z.object({
+ type: import_zod7.z.literal("function_call"),
+ call_id: import_zod7.z.string(),
+ name: import_zod7.z.string(),
+ arguments: import_zod7.z.string()
  }),
- import_zod6.z.object({
- type: import_zod6.z.literal("web_search_call")
+ import_zod7.z.object({
+ type: import_zod7.z.literal("web_search_call")
  }),
- import_zod6.z.object({
- type: import_zod6.z.literal("computer_call")
+ import_zod7.z.object({
+ type: import_zod7.z.literal("computer_call")
  }),
- import_zod6.z.object({
- type: import_zod6.z.literal("reasoning")
+ import_zod7.z.object({
+ type: import_zod7.z.literal("reasoning")
  })
  ])
  ),
- incomplete_details: import_zod6.z.object({ reason: import_zod6.z.string() }).nullable(),
+ incomplete_details: import_zod7.z.object({ reason: import_zod7.z.string() }).nullable(),
  usage: usageSchema
  })
  ),
@@ -1943,7 +2124,7 @@ var OpenAIResponsesLanguageModel = class {
  var _a2, _b2, _c2;
  return {
  sourceType: "url",
- id: (_c2 = (_b2 = (_a2 = this.config).generateId) == null ? void 0 : _b2.call(_a2)) != null ? _c2 : (0, import_provider_utils8.generateId)(),
+ id: (_c2 = (_b2 = (_a2 = this.config).generateId) == null ? void 0 : _b2.call(_a2)) != null ? _c2 : (0, import_provider_utils7.generateId)(),
  url: annotation.url,
  title: annotation.title
  };
@@ -1962,17 +2143,15 @@ var OpenAIResponsesLanguageModel = class {
  rawPrompt: void 0,
  rawSettings: {}
  },
- rawResponse: {
- headers: responseHeaders,
- body: rawResponse
- },
  request: {
  body: JSON.stringify(body)
  },
  response: {
  id: response.id,
  timestamp: new Date(response.created_at * 1e3),
- modelId: response.model
+ modelId: response.model,
+ headers: responseHeaders,
+ body: rawResponse
  },
  providerMetadata: {
  openai: {
@@ -1986,18 +2165,18 @@ var OpenAIResponsesLanguageModel = class {
  }
  async doStream(options) {
  const { args: body, warnings } = this.getArgs(options);
- const { responseHeaders, value: response } = await (0, import_provider_utils8.postJsonToApi)({
+ const { responseHeaders, value: response } = await (0, import_provider_utils7.postJsonToApi)({
  url: this.config.url({
  path: "/responses",
  modelId: this.modelId
  }),
- headers: (0, import_provider_utils8.combineHeaders)(this.config.headers(), options.headers),
+ headers: (0, import_provider_utils7.combineHeaders)(this.config.headers(), options.headers),
  body: {
  ...body,
  stream: true
  },
  failedResponseHandler: openaiFailedResponseHandler,
- successfulResponseHandler: (0, import_provider_utils8.createEventSourceResponseHandler)(
+ successfulResponseHandler: (0, import_provider_utils7.createEventSourceResponseHandler)(
  openaiResponsesChunkSchema
  ),
  abortSignal: options.abortSignal,
@@ -2085,7 +2264,7 @@ var OpenAIResponsesLanguageModel = class {
  type: "source",
  source: {
  sourceType: "url",
- id: (_h = (_g = (_f = self.config).generateId) == null ? void 0 : _g.call(_f)) != null ? _h : (0, import_provider_utils8.generateId)(),
+ id: (_h = (_g = (_f = self.config).generateId) == null ? void 0 : _g.call(_f)) != null ? _h : (0, import_provider_utils7.generateId)(),
  url: value.annotation.url,
  title: value.annotation.title
  }
@@ -2114,85 +2293,85 @@ var OpenAIResponsesLanguageModel = class {
  rawPrompt: void 0,
  rawSettings: {}
  },
- rawResponse: { headers: responseHeaders },
  request: { body: JSON.stringify(body) },
+ response: { headers: responseHeaders },
  warnings
  };
  }
  };
- var usageSchema = import_zod6.z.object({
- input_tokens: import_zod6.z.number(),
- input_tokens_details: import_zod6.z.object({ cached_tokens: import_zod6.z.number().nullish() }).nullish(),
- output_tokens: import_zod6.z.number(),
- output_tokens_details: import_zod6.z.object({ reasoning_tokens: import_zod6.z.number().nullish() }).nullish()
+ var usageSchema = import_zod7.z.object({
+ input_tokens: import_zod7.z.number(),
+ input_tokens_details: import_zod7.z.object({ cached_tokens: import_zod7.z.number().nullish() }).nullish(),
+ output_tokens: import_zod7.z.number(),
+ output_tokens_details: import_zod7.z.object({ reasoning_tokens: import_zod7.z.number().nullish() }).nullish()
  });
- var textDeltaChunkSchema = import_zod6.z.object({
- type: import_zod6.z.literal("response.output_text.delta"),
- delta: import_zod6.z.string()
+ var textDeltaChunkSchema = import_zod7.z.object({
+ type: import_zod7.z.literal("response.output_text.delta"),
+ delta: import_zod7.z.string()
  });
- var responseFinishedChunkSchema = import_zod6.z.object({
- type: import_zod6.z.enum(["response.completed", "response.incomplete"]),
- response: import_zod6.z.object({
- incomplete_details: import_zod6.z.object({ reason: import_zod6.z.string() }).nullish(),
+ var responseFinishedChunkSchema = import_zod7.z.object({
+ type: import_zod7.z.enum(["response.completed", "response.incomplete"]),
+ response: import_zod7.z.object({
+ incomplete_details: import_zod7.z.object({ reason: import_zod7.z.string() }).nullish(),
  usage: usageSchema
  })
  });
- var responseCreatedChunkSchema = import_zod6.z.object({
- type: import_zod6.z.literal("response.created"),
- response: import_zod6.z.object({
- id: import_zod6.z.string(),
- created_at: import_zod6.z.number(),
- model: import_zod6.z.string()
+ var responseCreatedChunkSchema = import_zod7.z.object({
+ type: import_zod7.z.literal("response.created"),
+ response: import_zod7.z.object({
+ id: import_zod7.z.string(),
+ created_at: import_zod7.z.number(),
+ model: import_zod7.z.string()
  })
  });
- var responseOutputItemDoneSchema = import_zod6.z.object({
- type: import_zod6.z.literal("response.output_item.done"),
- output_index: import_zod6.z.number(),
- item: import_zod6.z.discriminatedUnion("type", [
- import_zod6.z.object({
- type: import_zod6.z.literal("message")
+ var responseOutputItemDoneSchema = import_zod7.z.object({
+ type: import_zod7.z.literal("response.output_item.done"),
+ output_index: import_zod7.z.number(),
+ item: import_zod7.z.discriminatedUnion("type", [
+ import_zod7.z.object({
+ type: import_zod7.z.literal("message")
  }),
- import_zod6.z.object({
- type: import_zod6.z.literal("function_call"),
- id: import_zod6.z.string(),
- call_id: import_zod6.z.string(),
- name: import_zod6.z.string(),
- arguments: import_zod6.z.string(),
- status: import_zod6.z.literal("completed")
+ import_zod7.z.object({
+ type: import_zod7.z.literal("function_call"),
+ id: import_zod7.z.string(),
+ call_id: import_zod7.z.string(),
+ name: import_zod7.z.string(),
+ arguments: import_zod7.z.string(),
+ status: import_zod7.z.literal("completed")
  })
  ])
  });
- var responseFunctionCallArgumentsDeltaSchema = import_zod6.z.object({
- type: import_zod6.z.literal("response.function_call_arguments.delta"),
- item_id: import_zod6.z.string(),
- output_index: import_zod6.z.number(),
- delta: import_zod6.z.string()
+ var responseFunctionCallArgumentsDeltaSchema = import_zod7.z.object({
+ type: import_zod7.z.literal("response.function_call_arguments.delta"),
+ item_id: import_zod7.z.string(),
+ output_index: import_zod7.z.number(),
+ delta: import_zod7.z.string()
  });
- var responseOutputItemAddedSchema = import_zod6.z.object({
- type: import_zod6.z.literal("response.output_item.added"),
- output_index: import_zod6.z.number(),
- item: import_zod6.z.discriminatedUnion("type", [
- import_zod6.z.object({
- type: import_zod6.z.literal("message")
+ var responseOutputItemAddedSchema = import_zod7.z.object({
+ type: import_zod7.z.literal("response.output_item.added"),
+ output_index: import_zod7.z.number(),
+ item: import_zod7.z.discriminatedUnion("type", [
+ import_zod7.z.object({
+ type: import_zod7.z.literal("message")
  }),
- import_zod6.z.object({
- type: import_zod6.z.literal("function_call"),
- id: import_zod6.z.string(),
- call_id: import_zod6.z.string(),
- name: import_zod6.z.string(),
- arguments: import_zod6.z.string()
+ import_zod7.z.object({
+ type: import_zod7.z.literal("function_call"),
+ id: import_zod7.z.string(),
+ call_id: import_zod7.z.string(),
+ name: import_zod7.z.string(),
+ arguments: import_zod7.z.string()
  })
  ])
  });
- var responseAnnotationAddedSchema = import_zod6.z.object({
- type: import_zod6.z.literal("response.output_text.annotation.added"),
- annotation: import_zod6.z.object({
- type: import_zod6.z.literal("url_citation"),
- url: import_zod6.z.string(),
- title: import_zod6.z.string()
+ var responseAnnotationAddedSchema = import_zod7.z.object({
+ type: import_zod7.z.literal("response.output_text.annotation.added"),
+ annotation: import_zod7.z.object({
+ type: import_zod7.z.literal("url_citation"),
+ url: import_zod7.z.string(),
+ title: import_zod7.z.string()
  })
  });
- var openaiResponsesChunkSchema = import_zod6.z.union([
+ var openaiResponsesChunkSchema = import_zod7.z.union([
  textDeltaChunkSchema,
  responseFinishedChunkSchema,
  responseCreatedChunkSchema,
@@ -2200,7 +2379,7 @@ var openaiResponsesChunkSchema = import_zod6.z.union([
  responseFunctionCallArgumentsDeltaSchema,
  responseOutputItemAddedSchema,
  responseAnnotationAddedSchema,
- import_zod6.z.object({ type: import_zod6.z.string() }).passthrough()
+ import_zod7.z.object({ type: import_zod7.z.string() }).passthrough()
  // fallback for unknown chunks
  ]);
  function isTextDeltaChunk(chunk) {
@@ -2245,15 +2424,15 @@ function getResponsesModelConfig(modelId) {
  requiredAutoTruncation: false
  };
  }
- var openaiResponsesProviderOptionsSchema = import_zod6.z.object({
- metadata: import_zod6.z.any().nullish(),
- parallelToolCalls: import_zod6.z.boolean().nullish(),
- previousResponseId: import_zod6.z.string().nullish(),
- store: import_zod6.z.boolean().nullish(),
- user: import_zod6.z.string().nullish(),
- reasoningEffort: import_zod6.z.string().nullish(),
- strictSchemas: import_zod6.z.boolean().nullish(),
- instructions: import_zod6.z.string().nullish()
+ var openaiResponsesProviderOptionsSchema = import_zod7.z.object({
+ metadata: import_zod7.z.any().nullish(),
+ parallelToolCalls: import_zod7.z.boolean().nullish(),
+ previousResponseId: import_zod7.z.string().nullish(),
+ store: import_zod7.z.boolean().nullish(),
+ user: import_zod7.z.string().nullish(),
+ reasoningEffort: import_zod7.z.string().nullish(),
+ strictSchemas: import_zod7.z.boolean().nullish(),
+ instructions: import_zod7.z.string().nullish()
  });
  // Annotate the CommonJS export names for ESM import in node:
  0 && (module.exports = {
@@ -2262,6 +2441,7 @@ var openaiResponsesProviderOptionsSchema = import_zod6.z.object({
  OpenAIEmbeddingModel,
  OpenAIImageModel,
  OpenAIResponsesLanguageModel,
+ OpenAITranscriptionModel,
  modelMaxImagesPerCall
  });
  //# sourceMappingURL=index.js.map