@ai-sdk/openai 2.0.0-canary.2 → 2.0.0-canary.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -17,7 +17,6 @@ import { z as z2 } from "zod";
  import {
  UnsupportedFunctionalityError
  } from "@ai-sdk/provider";
- import { convertUint8ArrayToBase64 } from "@ai-sdk/provider-utils";
  function convertToOpenAIChatMessages({
  prompt,
  useLegacyFunctionCalling = false,
@@ -61,55 +60,65 @@ function convertToOpenAIChatMessages({
  messages.push({
  role: "user",
  content: content.map((part, index) => {
- var _a, _b, _c, _d;
+ var _a, _b, _c;
  switch (part.type) {
  case "text": {
  return { type: "text", text: part.text };
  }
- case "image": {
- return {
- type: "image_url",
- image_url: {
- url: part.image instanceof URL ? part.image.toString() : `data:${(_a = part.mimeType) != null ? _a : "image/jpeg"};base64,${convertUint8ArrayToBase64(part.image)}`,
- // OpenAI specific extension: image detail
- detail: (_c = (_b = part.providerOptions) == null ? void 0 : _b.openai) == null ? void 0 : _c.imageDetail
- }
- };
- }
  case "file": {
- if (part.data instanceof URL) {
- throw new UnsupportedFunctionalityError({
- functionality: "'File content parts with URL data' functionality not supported."
- });
- }
- switch (part.mimeType) {
- case "audio/wav": {
- return {
- type: "input_audio",
- input_audio: { data: part.data, format: "wav" }
- };
- }
- case "audio/mp3":
- case "audio/mpeg": {
- return {
- type: "input_audio",
- input_audio: { data: part.data, format: "mp3" }
- };
+ if (part.mediaType.startsWith("image/")) {
+ const mediaType = part.mediaType === "image/*" ? "image/jpeg" : part.mediaType;
+ return {
+ type: "image_url",
+ image_url: {
+ url: part.data instanceof URL ? part.data.toString() : `data:${mediaType};base64,${part.data}`,
+ // OpenAI specific extension: image detail
+ detail: (_b = (_a = part.providerOptions) == null ? void 0 : _a.openai) == null ? void 0 : _b.imageDetail
+ }
+ };
+ } else if (part.mediaType.startsWith("audio/")) {
+ if (part.data instanceof URL) {
+ throw new UnsupportedFunctionalityError({
+ functionality: "audio file parts with URLs"
+ });
  }
- case "application/pdf": {
- return {
- type: "file",
- file: {
- filename: (_d = part.filename) != null ? _d : `part-${index}.pdf`,
- file_data: `data:application/pdf;base64,${part.data}`
- }
- };
+ switch (part.mediaType) {
+ case "audio/wav": {
+ return {
+ type: "input_audio",
+ input_audio: { data: part.data, format: "wav" }
+ };
+ }
+ case "audio/mp3":
+ case "audio/mpeg": {
+ return {
+ type: "input_audio",
+ input_audio: { data: part.data, format: "mp3" }
+ };
+ }
+ default: {
+ throw new UnsupportedFunctionalityError({
+ functionality: `audio content parts with media type ${part.mediaType}`
+ });
+ }
  }
- default: {
+ } else if (part.mediaType === "application/pdf") {
+ if (part.data instanceof URL) {
  throw new UnsupportedFunctionalityError({
- functionality: `File content part type ${part.mimeType} in user messages`
+ functionality: "PDF file parts with URLs"
  });
  }
+ return {
+ type: "file",
+ file: {
+ filename: (_c = part.filename) != null ? _c : `part-${index}.pdf`,
+ file_data: `data:application/pdf;base64,${part.data}`
+ }
+ };
+ } else {
+ throw new UnsupportedFunctionalityError({
+ functionality: `file part media type ${part.mediaType}`
+ });
  }
  }
  }
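
Together with the dropped convertUint8ArrayToBase64 import above, this hunk reflects a prompt-format change rather than new behavior: the dedicated "image" content part (keyed by mimeType, with image as a URL or Uint8Array) is gone, and images, audio, and PDFs all reach the converter as "file" parts keyed by mediaType, with data already a base64 string or a URL. A rough TypeScript sketch of the shapes this converter branches on, inferred only from the code in the diff (fields not shown above are not implied):

// Sketch (not part of the diff): the part shapes as convertToOpenAIChatMessages now reads them.
type OldImagePart = {
  type: "image";
  image: URL | Uint8Array; // previously base64-encoded via convertUint8ArrayToBase64
  mimeType?: string;       // defaulted to "image/jpeg"
};

type NewFilePart = {
  type: "file";
  mediaType: string;       // e.g. "image/png", "image/*", "audio/wav", "audio/mpeg", "application/pdf"
  data: string | URL;      // base64 string or URL; no Uint8Array handling in this code path
  filename?: string;       // only used by the PDF branch, defaulting to `part-${index}.pdf`
};

// Branch order in the new code: image/* maps to image_url (with "image/*" widened to
// "image/jpeg"), audio/* maps to input_audio (wav and mp3/mpeg only), application/pdf
// maps to a file part, and anything else throws UnsupportedFunctionalityError.
// URL data is accepted for images but rejected for audio and PDF parts.
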
@@ -344,7 +353,7 @@ function prepareTools({
  default: {
  const _exhaustiveCheck = type;
  throw new UnsupportedFunctionalityError2({
- functionality: `Unsupported tool choice type: ${_exhaustiveCheck}`
+ functionality: `tool choice type: ${_exhaustiveCheck}`
  });
  }
  }
@@ -1052,13 +1061,8 @@ function convertToOpenAICompletionPrompt({
  case "text": {
  return part.text;
  }
- case "image": {
- throw new UnsupportedFunctionalityError4({
- functionality: "images"
- });
- }
  }
- }).join("");
+ }).filter(Boolean).join("");
  text += `${user}:
${userMessage}
 
@@ -1525,22 +1529,201 @@ var openaiImageResponseSchema = z5.object({
  data: z5.array(z5.object({ b64_json: z5.string() }))
  });
 
- // src/responses/openai-responses-language-model.ts
+ // src/openai-transcription-model.ts
  import {
  combineHeaders as combineHeaders5,
- createEventSourceResponseHandler as createEventSourceResponseHandler3,
+ convertBase64ToUint8Array,
  createJsonResponseHandler as createJsonResponseHandler5,
- generateId as generateId2,
  parseProviderOptions,
- postJsonToApi as postJsonToApi5
+ postFormDataToApi
  } from "@ai-sdk/provider-utils";
  import { z as z6 } from "zod";
+ var OpenAIProviderOptionsSchema = z6.object({
+ include: z6.array(z6.string()).optional().describe(
+ "Additional information to include in the transcription response."
+ ),
+ language: z6.string().optional().describe("The language of the input audio in ISO-639-1 format."),
+ prompt: z6.string().optional().describe(
+ "An optional text to guide the model's style or continue a previous audio segment."
+ ),
+ temperature: z6.number().min(0).max(1).optional().default(0).describe("The sampling temperature, between 0 and 1."),
+ timestampGranularities: z6.array(z6.enum(["word", "segment"])).optional().default(["segment"]).describe(
+ "The timestamp granularities to populate for this transcription."
+ )
+ });
+ var languageMap = {
+ afrikaans: "af",
+ arabic: "ar",
+ armenian: "hy",
+ azerbaijani: "az",
+ belarusian: "be",
+ bosnian: "bs",
+ bulgarian: "bg",
+ catalan: "ca",
+ chinese: "zh",
+ croatian: "hr",
+ czech: "cs",
+ danish: "da",
+ dutch: "nl",
+ english: "en",
+ estonian: "et",
+ finnish: "fi",
+ french: "fr",
+ galician: "gl",
+ german: "de",
+ greek: "el",
+ hebrew: "he",
+ hindi: "hi",
+ hungarian: "hu",
+ icelandic: "is",
+ indonesian: "id",
+ italian: "it",
+ japanese: "ja",
+ kannada: "kn",
+ kazakh: "kk",
+ korean: "ko",
+ latvian: "lv",
+ lithuanian: "lt",
+ macedonian: "mk",
+ malay: "ms",
+ marathi: "mr",
+ maori: "mi",
+ nepali: "ne",
+ norwegian: "no",
+ persian: "fa",
+ polish: "pl",
+ portuguese: "pt",
+ romanian: "ro",
+ russian: "ru",
+ serbian: "sr",
+ slovak: "sk",
+ slovenian: "sl",
+ spanish: "es",
+ swahili: "sw",
+ swedish: "sv",
+ tagalog: "tl",
+ tamil: "ta",
+ thai: "th",
+ turkish: "tr",
+ ukrainian: "uk",
+ urdu: "ur",
+ vietnamese: "vi",
+ welsh: "cy"
+ };
+ var OpenAITranscriptionModel = class {
+ constructor(modelId, config) {
+ this.modelId = modelId;
+ this.config = config;
+ this.specificationVersion = "v1";
+ }
+ get provider() {
+ return this.config.provider;
+ }
+ getArgs({
+ audio,
+ mediaType,
+ providerOptions
+ }) {
+ const warnings = [];
+ const openAIOptions = parseProviderOptions({
+ provider: "openai",
+ providerOptions,
+ schema: OpenAIProviderOptionsSchema
+ });
+ const formData = new FormData();
+ const blob = audio instanceof Uint8Array ? new Blob([audio]) : new Blob([convertBase64ToUint8Array(audio)]);
+ formData.append("model", this.modelId);
+ formData.append("file", new File([blob], "audio", { type: mediaType }));
+ if (openAIOptions) {
+ const transcriptionModelOptions = {
+ include: openAIOptions.include,
+ language: openAIOptions.language,
+ prompt: openAIOptions.prompt,
+ temperature: openAIOptions.temperature,
+ timestamp_granularities: openAIOptions.timestampGranularities
+ };
+ for (const key in transcriptionModelOptions) {
+ const value = transcriptionModelOptions[key];
+ if (value !== void 0) {
+ formData.append(key, value);
+ }
+ }
+ }
+ return {
+ formData,
+ warnings
+ };
+ }
+ async doGenerate(options) {
+ var _a, _b, _c, _d, _e, _f;
+ const currentDate = (_c = (_b = (_a = this.config._internal) == null ? void 0 : _a.currentDate) == null ? void 0 : _b.call(_a)) != null ? _c : /* @__PURE__ */ new Date();
+ const { formData, warnings } = this.getArgs(options);
+ const {
+ value: response,
+ responseHeaders,
+ rawValue: rawResponse
+ } = await postFormDataToApi({
+ url: this.config.url({
+ path: "/audio/transcriptions",
+ modelId: this.modelId
+ }),
+ headers: combineHeaders5(this.config.headers(), options.headers),
+ formData,
+ failedResponseHandler: openaiFailedResponseHandler,
+ successfulResponseHandler: createJsonResponseHandler5(
+ openaiTranscriptionResponseSchema
+ ),
+ abortSignal: options.abortSignal,
+ fetch: this.config.fetch
+ });
+ const language = response.language != null && response.language in languageMap ? languageMap[response.language] : void 0;
+ return {
+ text: response.text,
+ segments: (_e = (_d = response.words) == null ? void 0 : _d.map((word) => ({
+ text: word.word,
+ startSecond: word.start,
+ endSecond: word.end
+ }))) != null ? _e : [],
+ language,
+ durationInSeconds: (_f = response.duration) != null ? _f : void 0,
+ warnings,
+ response: {
+ timestamp: currentDate,
+ modelId: this.modelId,
+ headers: responseHeaders,
+ body: rawResponse
+ }
+ };
+ }
+ };
+ var openaiTranscriptionResponseSchema = z6.object({
+ text: z6.string(),
+ language: z6.string().nullish(),
+ duration: z6.number().nullish(),
+ words: z6.array(
+ z6.object({
+ word: z6.string(),
+ start: z6.number(),
+ end: z6.number()
+ })
+ ).nullish()
+ });
+
+ // src/responses/openai-responses-language-model.ts
+ import {
+ combineHeaders as combineHeaders6,
+ createEventSourceResponseHandler as createEventSourceResponseHandler3,
+ createJsonResponseHandler as createJsonResponseHandler6,
+ generateId as generateId2,
+ parseProviderOptions as parseProviderOptions2,
+ postJsonToApi as postJsonToApi5
+ } from "@ai-sdk/provider-utils";
+ import { z as z7 } from "zod";
 
  // src/responses/convert-to-openai-responses-messages.ts
  import {
  UnsupportedFunctionalityError as UnsupportedFunctionalityError5
  } from "@ai-sdk/provider";
- import { convertUint8ArrayToBase64 as convertUint8ArrayToBase642 } from "@ai-sdk/provider-utils";
  function convertToOpenAIResponsesMessages({
  prompt,
  systemMessageMode
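
Most of this hunk is the new transcription model: OpenAITranscriptionModel posts multipart form data to /audio/transcriptions, forwards the openai provider options validated by OpenAIProviderOptionsSchema, and maps the JSON response to text, per-word segments, an ISO-639-1 language code, and a duration. The sketch below wires the class up directly, based only on the fields doGenerate reads from config in this diff; the import path, model id, and config values are assumptions, not the package's documented API:

import { readFile } from "node:fs/promises";
// Assumed entry point; the diff only shows the class being added to this bundle's exports.
import { OpenAITranscriptionModel } from "@ai-sdk/openai/internal";

// config mirrors what the class reads: provider, url({ path, modelId }), headers(), fetch.
const model = new OpenAITranscriptionModel("whisper-1", {
  provider: "openai.transcription", // illustrative label
  url: ({ path }: { path: string; modelId: string }) => `https://api.openai.com/v1${path}`, // resolves to /audio/transcriptions
  headers: () => ({ Authorization: `Bearer ${process.env.OPENAI_API_KEY}` }),
  fetch: globalThis.fetch,
});

const result = await model.doGenerate({
  audio: new Uint8Array(await readFile("clip.wav")), // a base64 string is also accepted
  mediaType: "audio/wav",
  providerOptions: {
    openai: {
      language: "en", // ISO-639-1
      temperature: 0,
      timestampGranularities: ["word"], // word timings feed result.segments
    },
  },
});

console.log(result.text, result.language, result.durationInSeconds, result.segments.length);
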
@@ -1579,38 +1762,35 @@ function convertToOpenAIResponsesMessages({
  messages.push({
  role: "user",
  content: content.map((part, index) => {
- var _a, _b, _c, _d;
+ var _a, _b, _c;
  switch (part.type) {
  case "text": {
  return { type: "input_text", text: part.text };
  }
- case "image": {
- return {
- type: "input_image",
- image_url: part.image instanceof URL ? part.image.toString() : `data:${(_a = part.mimeType) != null ? _a : "image/jpeg"};base64,${convertUint8ArrayToBase642(part.image)}`,
- // OpenAI specific extension: image detail
- detail: (_c = (_b = part.providerOptions) == null ? void 0 : _b.openai) == null ? void 0 : _c.imageDetail
- };
- }
  case "file": {
- if (part.data instanceof URL) {
- throw new UnsupportedFunctionalityError5({
- functionality: "File URLs in user messages"
- });
- }
- switch (part.mimeType) {
- case "application/pdf": {
- return {
- type: "input_file",
- filename: (_d = part.filename) != null ? _d : `part-${index}.pdf`,
- file_data: `data:application/pdf;base64,${part.data}`
- };
- }
- default: {
+ if (part.mediaType.startsWith("image/")) {
+ const mediaType = part.mediaType === "image/*" ? "image/jpeg" : part.mediaType;
+ return {
+ type: "input_image",
+ image_url: part.data instanceof URL ? part.data.toString() : `data:${mediaType};base64,${part.data}`,
+ // OpenAI specific extension: image detail
+ detail: (_b = (_a = part.providerOptions) == null ? void 0 : _a.openai) == null ? void 0 : _b.imageDetail
+ };
+ } else if (part.mediaType === "application/pdf") {
+ if (part.data instanceof URL) {
  throw new UnsupportedFunctionalityError5({
- functionality: "Only PDF files are supported in user messages"
+ functionality: "PDF file parts with URLs"
  });
  }
+ return {
+ type: "input_file",
+ filename: (_c = part.filename) != null ? _c : `part-${index}.pdf`,
+ file_data: `data:application/pdf;base64,${part.data}`
+ };
+ } else {
+ throw new UnsupportedFunctionalityError5({
+ functionality: `file part media type ${part.mediaType}`
+ });
  }
  }
  }
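
The Responses API converter gets the same treatment as the chat converter: file parts with an image media type become input_image entries, application/pdf parts become input_file entries, URL data is only accepted for images, and any other media type now throws. An illustrative example of the user-message content this produces (base64 payloads truncated; the detail field comes from providerOptions.openai.imageDetail when set):

// Illustrative output of convertToOpenAIResponsesMessages for one user message
// with a text part, a PNG file part, and a PDF file part.
const userMessageContent = [
  { type: "input_text", text: "Summarize the attached report." },
  {
    type: "input_image",
    image_url: "data:image/png;base64,iVBORw0KGgo...", // or a plain https:// URL passed through
    detail: undefined, // providerOptions.openai.imageDetail, when provided
  },
  {
    type: "input_file",
    filename: "part-2.pdf", // falls back to `part-${index}.pdf` when no filename is given
    file_data: "data:application/pdf;base64,JVBERi0x...",
  },
];
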
@@ -1741,7 +1921,7 @@ function prepareResponsesTools({
  default: {
  const _exhaustiveCheck = type;
  throw new UnsupportedFunctionalityError6({
- functionality: `Unsupported tool choice type: ${_exhaustiveCheck}`
+ functionality: `tool choice type: ${_exhaustiveCheck}`
  });
  }
  }
@@ -1802,7 +1982,7 @@ var OpenAIResponsesLanguageModel = class {
  systemMessageMode: modelConfig.systemMessageMode
  });
  warnings.push(...messageWarnings);
- const openaiOptions = parseProviderOptions({
+ const openaiOptions = parseProviderOptions2({
  provider: "openai",
  providerOptions,
  schema: openaiResponsesProviderOptionsSchema
@@ -1888,53 +2068,53 @@ var OpenAIResponsesLanguageModel = class {
  path: "/responses",
  modelId: this.modelId
  }),
- headers: combineHeaders5(this.config.headers(), options.headers),
+ headers: combineHeaders6(this.config.headers(), options.headers),
  body,
  failedResponseHandler: openaiFailedResponseHandler,
- successfulResponseHandler: createJsonResponseHandler5(
- z6.object({
- id: z6.string(),
- created_at: z6.number(),
- model: z6.string(),
- output: z6.array(
- z6.discriminatedUnion("type", [
- z6.object({
- type: z6.literal("message"),
- role: z6.literal("assistant"),
- content: z6.array(
- z6.object({
- type: z6.literal("output_text"),
- text: z6.string(),
- annotations: z6.array(
- z6.object({
- type: z6.literal("url_citation"),
- start_index: z6.number(),
- end_index: z6.number(),
- url: z6.string(),
- title: z6.string()
+ successfulResponseHandler: createJsonResponseHandler6(
+ z7.object({
+ id: z7.string(),
+ created_at: z7.number(),
+ model: z7.string(),
+ output: z7.array(
+ z7.discriminatedUnion("type", [
+ z7.object({
+ type: z7.literal("message"),
+ role: z7.literal("assistant"),
+ content: z7.array(
+ z7.object({
+ type: z7.literal("output_text"),
+ text: z7.string(),
+ annotations: z7.array(
+ z7.object({
+ type: z7.literal("url_citation"),
+ start_index: z7.number(),
+ end_index: z7.number(),
+ url: z7.string(),
+ title: z7.string()
  })
  )
  })
  )
  }),
- z6.object({
- type: z6.literal("function_call"),
- call_id: z6.string(),
- name: z6.string(),
- arguments: z6.string()
+ z7.object({
+ type: z7.literal("function_call"),
+ call_id: z7.string(),
+ name: z7.string(),
+ arguments: z7.string()
  }),
- z6.object({
- type: z6.literal("web_search_call")
+ z7.object({
+ type: z7.literal("web_search_call")
  }),
- z6.object({
- type: z6.literal("computer_call")
+ z7.object({
+ type: z7.literal("computer_call")
  }),
- z6.object({
- type: z6.literal("reasoning")
+ z7.object({
+ type: z7.literal("reasoning")
  })
  ])
  ),
- incomplete_details: z6.object({ reason: z6.string() }).nullable(),
+ incomplete_details: z7.object({ reason: z7.string() }).nullable(),
  usage: usageSchema
  })
  ),
@@ -2003,7 +2183,7 @@ var OpenAIResponsesLanguageModel = class {
  path: "/responses",
  modelId: this.modelId
  }),
- headers: combineHeaders5(this.config.headers(), options.headers),
+ headers: combineHeaders6(this.config.headers(), options.headers),
  body: {
  ...body,
  stream: true
@@ -2132,79 +2312,79 @@ var OpenAIResponsesLanguageModel = class {
  };
  }
  };
- var usageSchema = z6.object({
- input_tokens: z6.number(),
- input_tokens_details: z6.object({ cached_tokens: z6.number().nullish() }).nullish(),
- output_tokens: z6.number(),
- output_tokens_details: z6.object({ reasoning_tokens: z6.number().nullish() }).nullish()
+ var usageSchema = z7.object({
+ input_tokens: z7.number(),
+ input_tokens_details: z7.object({ cached_tokens: z7.number().nullish() }).nullish(),
+ output_tokens: z7.number(),
+ output_tokens_details: z7.object({ reasoning_tokens: z7.number().nullish() }).nullish()
  });
- var textDeltaChunkSchema = z6.object({
- type: z6.literal("response.output_text.delta"),
- delta: z6.string()
+ var textDeltaChunkSchema = z7.object({
+ type: z7.literal("response.output_text.delta"),
+ delta: z7.string()
  });
- var responseFinishedChunkSchema = z6.object({
- type: z6.enum(["response.completed", "response.incomplete"]),
- response: z6.object({
- incomplete_details: z6.object({ reason: z6.string() }).nullish(),
+ var responseFinishedChunkSchema = z7.object({
+ type: z7.enum(["response.completed", "response.incomplete"]),
+ response: z7.object({
+ incomplete_details: z7.object({ reason: z7.string() }).nullish(),
  usage: usageSchema
  })
  });
- var responseCreatedChunkSchema = z6.object({
- type: z6.literal("response.created"),
- response: z6.object({
- id: z6.string(),
- created_at: z6.number(),
- model: z6.string()
+ var responseCreatedChunkSchema = z7.object({
+ type: z7.literal("response.created"),
+ response: z7.object({
+ id: z7.string(),
+ created_at: z7.number(),
+ model: z7.string()
  })
  });
- var responseOutputItemDoneSchema = z6.object({
- type: z6.literal("response.output_item.done"),
- output_index: z6.number(),
- item: z6.discriminatedUnion("type", [
- z6.object({
- type: z6.literal("message")
+ var responseOutputItemDoneSchema = z7.object({
+ type: z7.literal("response.output_item.done"),
+ output_index: z7.number(),
+ item: z7.discriminatedUnion("type", [
+ z7.object({
+ type: z7.literal("message")
  }),
- z6.object({
- type: z6.literal("function_call"),
- id: z6.string(),
- call_id: z6.string(),
- name: z6.string(),
- arguments: z6.string(),
- status: z6.literal("completed")
+ z7.object({
+ type: z7.literal("function_call"),
+ id: z7.string(),
+ call_id: z7.string(),
+ name: z7.string(),
+ arguments: z7.string(),
+ status: z7.literal("completed")
  })
  ])
  });
- var responseFunctionCallArgumentsDeltaSchema = z6.object({
- type: z6.literal("response.function_call_arguments.delta"),
- item_id: z6.string(),
- output_index: z6.number(),
- delta: z6.string()
+ var responseFunctionCallArgumentsDeltaSchema = z7.object({
+ type: z7.literal("response.function_call_arguments.delta"),
+ item_id: z7.string(),
+ output_index: z7.number(),
+ delta: z7.string()
  });
- var responseOutputItemAddedSchema = z6.object({
- type: z6.literal("response.output_item.added"),
- output_index: z6.number(),
- item: z6.discriminatedUnion("type", [
- z6.object({
- type: z6.literal("message")
+ var responseOutputItemAddedSchema = z7.object({
+ type: z7.literal("response.output_item.added"),
+ output_index: z7.number(),
+ item: z7.discriminatedUnion("type", [
+ z7.object({
+ type: z7.literal("message")
  }),
- z6.object({
- type: z6.literal("function_call"),
- id: z6.string(),
- call_id: z6.string(),
- name: z6.string(),
- arguments: z6.string()
+ z7.object({
+ type: z7.literal("function_call"),
+ id: z7.string(),
+ call_id: z7.string(),
+ name: z7.string(),
+ arguments: z7.string()
  })
  ])
  });
- var responseAnnotationAddedSchema = z6.object({
- type: z6.literal("response.output_text.annotation.added"),
- annotation: z6.object({
- type: z6.literal("url_citation"),
- url: z6.string(),
- title: z6.string()
+ var responseAnnotationAddedSchema = z7.object({
+ type: z7.literal("response.output_text.annotation.added"),
+ annotation: z7.object({
+ type: z7.literal("url_citation"),
+ url: z7.string(),
+ title: z7.string()
  })
  });
- var openaiResponsesChunkSchema = z6.union([
+ var openaiResponsesChunkSchema = z7.union([
  textDeltaChunkSchema,
  responseFinishedChunkSchema,
  responseCreatedChunkSchema,
@@ -2212,7 +2392,7 @@ var openaiResponsesChunkSchema = z6.union([
  responseFunctionCallArgumentsDeltaSchema,
  responseOutputItemAddedSchema,
  responseAnnotationAddedSchema,
- z6.object({ type: z6.string() }).passthrough()
+ z7.object({ type: z7.string() }).passthrough()
  // fallback for unknown chunks
  ]);
  function isTextDeltaChunk(chunk) {
@@ -2257,15 +2437,15 @@ function getResponsesModelConfig(modelId) {
  requiredAutoTruncation: false
  };
  }
- var openaiResponsesProviderOptionsSchema = z6.object({
- metadata: z6.any().nullish(),
- parallelToolCalls: z6.boolean().nullish(),
- previousResponseId: z6.string().nullish(),
- store: z6.boolean().nullish(),
- user: z6.string().nullish(),
- reasoningEffort: z6.string().nullish(),
- strictSchemas: z6.boolean().nullish(),
- instructions: z6.string().nullish()
+ var openaiResponsesProviderOptionsSchema = z7.object({
+ metadata: z7.any().nullish(),
+ parallelToolCalls: z7.boolean().nullish(),
+ previousResponseId: z7.string().nullish(),
+ store: z7.boolean().nullish(),
+ user: z7.string().nullish(),
+ reasoningEffort: z7.string().nullish(),
+ strictSchemas: z7.boolean().nullish(),
+ instructions: z7.string().nullish()
  });
  export {
  OpenAIChatLanguageModel,
@@ -2273,6 +2453,7 @@ export {
  OpenAIEmbeddingModel,
  OpenAIImageModel,
  OpenAIResponsesLanguageModel,
+ OpenAITranscriptionModel,
  modelMaxImagesPerCall
  };
  //# sourceMappingURL=index.mjs.map