@ai-sdk/openai 2.0.39 → 2.0.40

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.js CHANGED
@@ -27,7 +27,7 @@ __export(src_exports, {
  module.exports = __toCommonJS(src_exports);
 
  // src/openai-provider.ts
- var import_provider_utils16 = require("@ai-sdk/provider-utils");
+ var import_provider_utils17 = require("@ai-sdk/provider-utils");
 
  // src/chat/openai-chat-language-model.ts
  var import_provider3 = require("@ai-sdk/provider");
@@ -1783,39 +1783,62 @@ var imageGeneration = (args = {}) => {
  return imageGenerationToolFactory(args);
  };
 
- // src/tool/web-search.ts
+ // src/tool/local-shell.ts
  var import_provider_utils10 = require("@ai-sdk/provider-utils");
  var import_v412 = require("zod/v4");
- var webSearchArgsSchema = import_v412.z.object({
- filters: import_v412.z.object({
- allowedDomains: import_v412.z.array(import_v412.z.string()).optional()
+ var localShellInputSchema = import_v412.z.object({
+ action: import_v412.z.object({
+ type: import_v412.z.literal("exec"),
+ command: import_v412.z.array(import_v412.z.string()),
+ timeoutMs: import_v412.z.number().optional(),
+ user: import_v412.z.string().optional(),
+ workingDirectory: import_v412.z.string().optional(),
+ env: import_v412.z.record(import_v412.z.string(), import_v412.z.string()).optional()
+ })
+ });
+ var localShellOutputSchema = import_v412.z.object({
+ output: import_v412.z.string()
+ });
+ var localShell = (0, import_provider_utils10.createProviderDefinedToolFactoryWithOutputSchema)({
+ id: "openai.local_shell",
+ name: "local_shell",
+ inputSchema: localShellInputSchema,
+ outputSchema: localShellOutputSchema
+ });
+
+ // src/tool/web-search.ts
+ var import_provider_utils11 = require("@ai-sdk/provider-utils");
+ var import_v413 = require("zod/v4");
+ var webSearchArgsSchema = import_v413.z.object({
+ filters: import_v413.z.object({
+ allowedDomains: import_v413.z.array(import_v413.z.string()).optional()
  }).optional(),
- searchContextSize: import_v412.z.enum(["low", "medium", "high"]).optional(),
- userLocation: import_v412.z.object({
- type: import_v412.z.literal("approximate"),
- country: import_v412.z.string().optional(),
- city: import_v412.z.string().optional(),
- region: import_v412.z.string().optional(),
- timezone: import_v412.z.string().optional()
+ searchContextSize: import_v413.z.enum(["low", "medium", "high"]).optional(),
+ userLocation: import_v413.z.object({
+ type: import_v413.z.literal("approximate"),
+ country: import_v413.z.string().optional(),
+ city: import_v413.z.string().optional(),
+ region: import_v413.z.string().optional(),
+ timezone: import_v413.z.string().optional()
  }).optional()
  });
- var webSearchToolFactory = (0, import_provider_utils10.createProviderDefinedToolFactory)({
+ var webSearchToolFactory = (0, import_provider_utils11.createProviderDefinedToolFactory)({
  id: "openai.web_search",
  name: "web_search",
- inputSchema: import_v412.z.object({
- action: import_v412.z.discriminatedUnion("type", [
- import_v412.z.object({
- type: import_v412.z.literal("search"),
- query: import_v412.z.string().nullish()
+ inputSchema: import_v413.z.object({
+ action: import_v413.z.discriminatedUnion("type", [
+ import_v413.z.object({
+ type: import_v413.z.literal("search"),
+ query: import_v413.z.string().nullish()
  }),
- import_v412.z.object({
- type: import_v412.z.literal("open_page"),
- url: import_v412.z.string()
+ import_v413.z.object({
+ type: import_v413.z.literal("open_page"),
+ url: import_v413.z.string()
  }),
- import_v412.z.object({
- type: import_v412.z.literal("find"),
- url: import_v412.z.string(),
- pattern: import_v412.z.string()
+ import_v413.z.object({
+ type: import_v413.z.literal("find"),
+ url: import_v413.z.string(),
+ pattern: import_v413.z.string()
  })
  ]).nullish()
  })
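For orientation, a hedged sketch of the shapes the new local-shell schemas accept, derived only from the zod definitions above (all values are invented for illustration):

  // an input object that localShellInputSchema would accept
  const exampleLocalShellInput = {
    action: {
      type: "exec",
      command: ["ls", "-la"],      // argv-style command, required
      timeoutMs: 5000,             // optional
      user: "runner",              // optional
      workingDirectory: "/tmp",    // optional
      env: { CI: "true" }          // optional string-to-string map
    }
  };

  // an output object that localShellOutputSchema would accept
  const exampleLocalShellOutput = { output: "total 0\n..." };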
@@ -1825,59 +1848,59 @@ var webSearch = (args = {}) => {
1825
1848
  };
1826
1849
 
1827
1850
  // src/tool/web-search-preview.ts
1828
- var import_provider_utils11 = require("@ai-sdk/provider-utils");
1829
- var import_v413 = require("zod/v4");
1830
- var webSearchPreviewArgsSchema = import_v413.z.object({
1851
+ var import_provider_utils12 = require("@ai-sdk/provider-utils");
1852
+ var import_v414 = require("zod/v4");
1853
+ var webSearchPreviewArgsSchema = import_v414.z.object({
1831
1854
  /**
1832
1855
  * Search context size to use for the web search.
1833
1856
  * - high: Most comprehensive context, highest cost, slower response
1834
1857
  * - medium: Balanced context, cost, and latency (default)
1835
1858
  * - low: Least context, lowest cost, fastest response
1836
1859
  */
1837
- searchContextSize: import_v413.z.enum(["low", "medium", "high"]).optional(),
1860
+ searchContextSize: import_v414.z.enum(["low", "medium", "high"]).optional(),
1838
1861
  /**
1839
1862
  * User location information to provide geographically relevant search results.
1840
1863
  */
1841
- userLocation: import_v413.z.object({
1864
+ userLocation: import_v414.z.object({
1842
1865
  /**
1843
1866
  * Type of location (always 'approximate')
1844
1867
  */
1845
- type: import_v413.z.literal("approximate"),
1868
+ type: import_v414.z.literal("approximate"),
1846
1869
  /**
1847
1870
  * Two-letter ISO country code (e.g., 'US', 'GB')
1848
1871
  */
1849
- country: import_v413.z.string().optional(),
1872
+ country: import_v414.z.string().optional(),
1850
1873
  /**
1851
1874
  * City name (free text, e.g., 'Minneapolis')
1852
1875
  */
1853
- city: import_v413.z.string().optional(),
1876
+ city: import_v414.z.string().optional(),
1854
1877
  /**
1855
1878
  * Region name (free text, e.g., 'Minnesota')
1856
1879
  */
1857
- region: import_v413.z.string().optional(),
1880
+ region: import_v414.z.string().optional(),
1858
1881
  /**
1859
1882
  * IANA timezone (e.g., 'America/Chicago')
1860
1883
  */
1861
- timezone: import_v413.z.string().optional()
1884
+ timezone: import_v414.z.string().optional()
1862
1885
  }).optional()
1863
1886
  });
1864
- var webSearchPreview = (0, import_provider_utils11.createProviderDefinedToolFactory)({
1887
+ var webSearchPreview = (0, import_provider_utils12.createProviderDefinedToolFactory)({
1865
1888
  id: "openai.web_search_preview",
1866
1889
  name: "web_search_preview",
1867
- inputSchema: import_v413.z.object({
1868
- action: import_v413.z.discriminatedUnion("type", [
1869
- import_v413.z.object({
1870
- type: import_v413.z.literal("search"),
1871
- query: import_v413.z.string().nullish()
1890
+ inputSchema: import_v414.z.object({
1891
+ action: import_v414.z.discriminatedUnion("type", [
1892
+ import_v414.z.object({
1893
+ type: import_v414.z.literal("search"),
1894
+ query: import_v414.z.string().nullish()
1872
1895
  }),
1873
- import_v413.z.object({
1874
- type: import_v413.z.literal("open_page"),
1875
- url: import_v413.z.string()
1896
+ import_v414.z.object({
1897
+ type: import_v414.z.literal("open_page"),
1898
+ url: import_v414.z.string()
1876
1899
  }),
1877
- import_v413.z.object({
1878
- type: import_v413.z.literal("find"),
1879
- url: import_v413.z.string(),
1880
- pattern: import_v413.z.string()
1900
+ import_v414.z.object({
1901
+ type: import_v414.z.literal("find"),
1902
+ url: import_v414.z.string(),
1903
+ pattern: import_v414.z.string()
1881
1904
  })
1882
1905
  ]).nullish()
1883
1906
  })
@@ -1922,6 +1945,15 @@ var openaiTools = {
  * @param background - Transparent or opaque
  */
  imageGeneration,
+ /**
+ * Local shell is a tool that allows agents to run shell commands locally
+ * on a machine you or the user provides.
+ *
+ * Supported models: `gpt-5-codex` and `codex-mini-latest`
+ *
+ * Must have name `local_shell`.
+ */
+ localShell,
  /**
  * Web search allows models to access up-to-date information from the internet
  * and provide answers with sourced citations.
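The new `localShell` entry above is exposed through `openai.tools`. A minimal usage sketch, assuming the factory accepts an `execute` handler like the other provider-defined tools (the model id, prompt, and handler body are illustrative, not part of this diff):

  import { generateText } from "ai";
  import { openai } from "@ai-sdk/openai";
  import { execFile } from "node:child_process";
  import { promisify } from "node:util";

  const run = promisify(execFile);

  const result = await generateText({
    model: openai.responses("gpt-5-codex"),
    prompt: "List the files in the current working directory.",
    tools: {
      // per the comment above, the tool must be registered under the name `local_shell`
      local_shell: openai.tools.localShell({
        execute: async ({ action }) => {
          // run the command proposed by the model and hand its output back
          const { stdout } = await run(action.command[0], action.command.slice(1), {
            cwd: action.workingDirectory,
            timeout: action.timeoutMs
          });
          return { output: stdout };
        }
      })
    }
  });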
@@ -1949,13 +1981,13 @@ var openaiTools = {
1949
1981
 
1950
1982
  // src/responses/openai-responses-language-model.ts
1951
1983
  var import_provider8 = require("@ai-sdk/provider");
1952
- var import_provider_utils13 = require("@ai-sdk/provider-utils");
1953
- var import_v415 = require("zod/v4");
1984
+ var import_provider_utils14 = require("@ai-sdk/provider-utils");
1985
+ var import_v416 = require("zod/v4");
1954
1986
 
1955
1987
  // src/responses/convert-to-openai-responses-input.ts
1956
1988
  var import_provider6 = require("@ai-sdk/provider");
1957
- var import_provider_utils12 = require("@ai-sdk/provider-utils");
1958
- var import_v414 = require("zod/v4");
1989
+ var import_provider_utils13 = require("@ai-sdk/provider-utils");
1990
+ var import_v415 = require("zod/v4");
1959
1991
  function isFileId(data, prefixes) {
1960
1992
  if (!prefixes) return false;
1961
1993
  return prefixes.some((prefix) => data.startsWith(prefix));
@@ -1964,9 +1996,10 @@ async function convertToOpenAIResponsesInput({
  prompt,
  systemMessageMode,
  fileIdPrefixes,
- store
+ store,
+ hasLocalShellTool = false
  }) {
- var _a, _b, _c, _d, _e, _f;
+ var _a, _b, _c, _d, _e, _f, _g, _h, _i;
  const input = [];
  const warnings = [];
  for (const { role, content } of prompt) {
@@ -2012,7 +2045,7 @@ async function convertToOpenAIResponsesInput({
2012
2045
  return {
2013
2046
  type: "input_image",
2014
2047
  ...part.data instanceof URL ? { image_url: part.data.toString() } : typeof part.data === "string" && isFileId(part.data, fileIdPrefixes) ? { file_id: part.data } : {
2015
- image_url: `data:${mediaType};base64,${(0, import_provider_utils12.convertToBase64)(part.data)}`
2048
+ image_url: `data:${mediaType};base64,${(0, import_provider_utils13.convertToBase64)(part.data)}`
2016
2049
  },
2017
2050
  detail: (_b2 = (_a2 = part.providerOptions) == null ? void 0 : _a2.openai) == null ? void 0 : _b2.imageDetail
2018
2051
  };
@@ -2027,7 +2060,7 @@ async function convertToOpenAIResponsesInput({
2027
2060
  type: "input_file",
2028
2061
  ...typeof part.data === "string" && isFileId(part.data, fileIdPrefixes) ? { file_id: part.data } : {
2029
2062
  filename: (_c2 = part.filename) != null ? _c2 : `part-${index}.pdf`,
2030
- file_data: `data:application/pdf;base64,${(0, import_provider_utils12.convertToBase64)(part.data)}`
2063
+ file_data: `data:application/pdf;base64,${(0, import_provider_utils13.convertToBase64)(part.data)}`
2031
2064
  }
2032
2065
  };
2033
2066
  } else {
@@ -2059,12 +2092,29 @@ async function convertToOpenAIResponsesInput({
  if (part.providerExecuted) {
  break;
  }
+ if (hasLocalShellTool && part.toolName === "local_shell") {
+ const parsedInput = localShellInputSchema.parse(part.input);
+ input.push({
+ type: "local_shell_call",
+ call_id: part.toolCallId,
+ id: (_f = (_e = (_d = part.providerOptions) == null ? void 0 : _d.openai) == null ? void 0 : _e.itemId) != null ? _f : void 0,
+ action: {
+ type: "exec",
+ command: parsedInput.action.command,
+ timeout_ms: parsedInput.action.timeoutMs,
+ user: parsedInput.action.user,
+ working_directory: parsedInput.action.workingDirectory,
+ env: parsedInput.action.env
+ }
+ });
+ break;
+ }
  input.push({
  type: "function_call",
  call_id: part.toolCallId,
  name: part.toolName,
  arguments: JSON.stringify(part.input),
- id: (_f = (_e = (_d = part.providerOptions) == null ? void 0 : _d.openai) == null ? void 0 : _e.itemId) != null ? _f : void 0
+ id: (_i = (_h = (_g = part.providerOptions) == null ? void 0 : _g.openai) == null ? void 0 : _h.itemId) != null ? _i : void 0
  });
  break;
  }
@@ -2081,7 +2131,7 @@ async function convertToOpenAIResponsesInput({
2081
2131
  break;
2082
2132
  }
2083
2133
  case "reasoning": {
2084
- const providerOptions = await (0, import_provider_utils12.parseProviderOptions)({
2134
+ const providerOptions = await (0, import_provider_utils13.parseProviderOptions)({
2085
2135
  provider: "openai",
2086
2136
  providerOptions: part.providerOptions,
2087
2137
  schema: openaiResponsesReasoningProviderOptionsSchema
@@ -2138,6 +2188,14 @@ async function convertToOpenAIResponsesInput({
  case "tool": {
  for (const part of content) {
  const output = part.output;
+ if (hasLocalShellTool && part.toolName === "local_shell" && output.type === "json") {
+ input.push({
+ type: "local_shell_call_output",
+ call_id: part.toolCallId,
+ output: localShellOutputSchema.parse(output.value).output
+ });
+ break;
+ }
  let contentValue;
  switch (output.type) {
  case "text":
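A small sketch of the round trip this branch implements (ids and output text invented): a `local_shell` tool result whose output is JSON is forwarded to the Responses API as a `local_shell_call_output` item instead of a generic `function_call_output`:

  // SDK-side tool result part for the local shell tool
  const toolResultPart = {
    toolCallId: "call_abc123",
    toolName: "local_shell",
    output: { type: "json", value: { output: "total 0\n..." } }
  };

  // the item pushed onto the Responses API `input` array by this branch
  const localShellCallOutputItem = {
    type: "local_shell_call_output",
    call_id: toolResultPart.toolCallId,
    output: toolResultPart.output.value.output
  };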
@@ -2166,9 +2224,9 @@ async function convertToOpenAIResponsesInput({
2166
2224
  }
2167
2225
  return { input, warnings };
2168
2226
  }
2169
- var openaiResponsesReasoningProviderOptionsSchema = import_v414.z.object({
2170
- itemId: import_v414.z.string().nullish(),
2171
- reasoningEncryptedContent: import_v414.z.string().nullish()
2227
+ var openaiResponsesReasoningProviderOptionsSchema = import_v415.z.object({
2228
+ itemId: import_v415.z.string().nullish(),
2229
+ reasoningEncryptedContent: import_v415.z.string().nullish()
2172
2230
  });
2173
2231
 
2174
2232
  // src/responses/map-openai-responses-finish-reason.ts
@@ -2229,6 +2287,12 @@ function prepareResponsesTools({
  });
  break;
  }
+ case "openai.local_shell": {
+ openaiTools2.push({
+ type: "local_shell"
+ });
+ break;
+ }
  case "openai.web_search_preview": {
  const args = webSearchPreviewArgsSchema.parse(tool.args);
  openaiTools2.push({
@@ -2308,66 +2372,79 @@ function prepareResponsesTools({
2308
2372
  }
2309
2373
 
2310
2374
  // src/responses/openai-responses-language-model.ts
2311
- var webSearchCallItem = import_v415.z.object({
2312
- type: import_v415.z.literal("web_search_call"),
2313
- id: import_v415.z.string(),
2314
- status: import_v415.z.string(),
2315
- action: import_v415.z.discriminatedUnion("type", [
2316
- import_v415.z.object({
2317
- type: import_v415.z.literal("search"),
2318
- query: import_v415.z.string().nullish()
2375
+ var webSearchCallItem = import_v416.z.object({
2376
+ type: import_v416.z.literal("web_search_call"),
2377
+ id: import_v416.z.string(),
2378
+ status: import_v416.z.string(),
2379
+ action: import_v416.z.discriminatedUnion("type", [
2380
+ import_v416.z.object({
2381
+ type: import_v416.z.literal("search"),
2382
+ query: import_v416.z.string().nullish()
2319
2383
  }),
2320
- import_v415.z.object({
2321
- type: import_v415.z.literal("open_page"),
2322
- url: import_v415.z.string()
2384
+ import_v416.z.object({
2385
+ type: import_v416.z.literal("open_page"),
2386
+ url: import_v416.z.string()
2323
2387
  }),
2324
- import_v415.z.object({
2325
- type: import_v415.z.literal("find"),
2326
- url: import_v415.z.string(),
2327
- pattern: import_v415.z.string()
2388
+ import_v416.z.object({
2389
+ type: import_v416.z.literal("find"),
2390
+ url: import_v416.z.string(),
2391
+ pattern: import_v416.z.string()
2328
2392
  })
2329
2393
  ]).nullish()
2330
2394
  });
2331
- var fileSearchCallItem = import_v415.z.object({
2332
- type: import_v415.z.literal("file_search_call"),
2333
- id: import_v415.z.string(),
2334
- queries: import_v415.z.array(import_v415.z.string()),
2335
- results: import_v415.z.array(
2336
- import_v415.z.object({
2337
- attributes: import_v415.z.record(import_v415.z.string(), import_v415.z.unknown()),
2338
- file_id: import_v415.z.string(),
2339
- filename: import_v415.z.string(),
2340
- score: import_v415.z.number(),
2341
- text: import_v415.z.string()
2395
+ var fileSearchCallItem = import_v416.z.object({
2396
+ type: import_v416.z.literal("file_search_call"),
2397
+ id: import_v416.z.string(),
2398
+ queries: import_v416.z.array(import_v416.z.string()),
2399
+ results: import_v416.z.array(
2400
+ import_v416.z.object({
2401
+ attributes: import_v416.z.record(import_v416.z.string(), import_v416.z.unknown()),
2402
+ file_id: import_v416.z.string(),
2403
+ filename: import_v416.z.string(),
2404
+ score: import_v416.z.number(),
2405
+ text: import_v416.z.string()
2342
2406
  })
2343
2407
  ).nullish()
2344
2408
  });
2345
- var codeInterpreterCallItem = import_v415.z.object({
2346
- type: import_v415.z.literal("code_interpreter_call"),
2347
- id: import_v415.z.string(),
2348
- code: import_v415.z.string().nullable(),
2349
- container_id: import_v415.z.string(),
2350
- outputs: import_v415.z.array(
2351
- import_v415.z.discriminatedUnion("type", [
2352
- import_v415.z.object({ type: import_v415.z.literal("logs"), logs: import_v415.z.string() }),
2353
- import_v415.z.object({ type: import_v415.z.literal("image"), url: import_v415.z.string() })
2409
+ var codeInterpreterCallItem = import_v416.z.object({
2410
+ type: import_v416.z.literal("code_interpreter_call"),
2411
+ id: import_v416.z.string(),
2412
+ code: import_v416.z.string().nullable(),
2413
+ container_id: import_v416.z.string(),
2414
+ outputs: import_v416.z.array(
2415
+ import_v416.z.discriminatedUnion("type", [
2416
+ import_v416.z.object({ type: import_v416.z.literal("logs"), logs: import_v416.z.string() }),
2417
+ import_v416.z.object({ type: import_v416.z.literal("image"), url: import_v416.z.string() })
2354
2418
  ])
2355
2419
  ).nullable()
2356
2420
  });
2357
- var imageGenerationCallItem = import_v415.z.object({
2358
- type: import_v415.z.literal("image_generation_call"),
2359
- id: import_v415.z.string(),
2360
- result: import_v415.z.string()
2421
+ var localShellCallItem = import_v416.z.object({
2422
+ type: import_v416.z.literal("local_shell_call"),
2423
+ id: import_v416.z.string(),
2424
+ call_id: import_v416.z.string(),
2425
+ action: import_v416.z.object({
2426
+ type: import_v416.z.literal("exec"),
2427
+ command: import_v416.z.array(import_v416.z.string()),
2428
+ timeout_ms: import_v416.z.number().optional(),
2429
+ user: import_v416.z.string().optional(),
2430
+ working_directory: import_v416.z.string().optional(),
2431
+ env: import_v416.z.record(import_v416.z.string(), import_v416.z.string()).optional()
2432
+ })
2433
+ });
2434
+ var imageGenerationCallItem = import_v416.z.object({
2435
+ type: import_v416.z.literal("image_generation_call"),
2436
+ id: import_v416.z.string(),
2437
+ result: import_v416.z.string()
2361
2438
  });
2362
2439
  var TOP_LOGPROBS_MAX = 20;
2363
- var LOGPROBS_SCHEMA = import_v415.z.array(
2364
- import_v415.z.object({
2365
- token: import_v415.z.string(),
2366
- logprob: import_v415.z.number(),
2367
- top_logprobs: import_v415.z.array(
2368
- import_v415.z.object({
2369
- token: import_v415.z.string(),
2370
- logprob: import_v415.z.number()
2440
+ var LOGPROBS_SCHEMA = import_v416.z.array(
2441
+ import_v416.z.object({
2442
+ token: import_v416.z.string(),
2443
+ logprob: import_v416.z.number(),
2444
+ top_logprobs: import_v416.z.array(
2445
+ import_v416.z.object({
2446
+ token: import_v416.z.string(),
2447
+ logprob: import_v416.z.number()
2371
2448
  })
2372
2449
  )
2373
2450
  })
@@ -2424,7 +2501,7 @@ var OpenAIResponsesLanguageModel = class {
2424
2501
  if (stopSequences != null) {
2425
2502
  warnings.push({ type: "unsupported-setting", setting: "stopSequences" });
2426
2503
  }
2427
- const openaiOptions = await (0, import_provider_utils13.parseProviderOptions)({
2504
+ const openaiOptions = await (0, import_provider_utils14.parseProviderOptions)({
2428
2505
  provider: "openai",
2429
2506
  providerOptions,
2430
2507
  schema: openaiResponsesProviderOptionsSchema
@@ -2433,7 +2510,8 @@ var OpenAIResponsesLanguageModel = class {
  prompt,
  systemMessageMode: modelConfig.systemMessageMode,
  fileIdPrefixes: this.config.fileIdPrefixes,
- store: (_a = openaiOptions == null ? void 0 : openaiOptions.store) != null ? _a : true
+ store: (_a = openaiOptions == null ? void 0 : openaiOptions.store) != null ? _a : true,
+ hasLocalShellTool: hasOpenAITool("openai.local_shell")
  });
  warnings.push(...inputWarnings);
  const strictJsonSchema = (_b = openaiOptions == null ? void 0 : openaiOptions.strictJsonSchema) != null ? _b : false;
@@ -2592,51 +2670,51 @@ var OpenAIResponsesLanguageModel = class {
2592
2670
  responseHeaders,
2593
2671
  value: response,
2594
2672
  rawValue: rawResponse
2595
- } = await (0, import_provider_utils13.postJsonToApi)({
2673
+ } = await (0, import_provider_utils14.postJsonToApi)({
2596
2674
  url,
2597
- headers: (0, import_provider_utils13.combineHeaders)(this.config.headers(), options.headers),
2675
+ headers: (0, import_provider_utils14.combineHeaders)(this.config.headers(), options.headers),
2598
2676
  body,
2599
2677
  failedResponseHandler: openaiFailedResponseHandler,
2600
- successfulResponseHandler: (0, import_provider_utils13.createJsonResponseHandler)(
2601
- import_v415.z.object({
2602
- id: import_v415.z.string(),
2603
- created_at: import_v415.z.number(),
2604
- error: import_v415.z.object({
2605
- code: import_v415.z.string(),
2606
- message: import_v415.z.string()
2678
+ successfulResponseHandler: (0, import_provider_utils14.createJsonResponseHandler)(
2679
+ import_v416.z.object({
2680
+ id: import_v416.z.string(),
2681
+ created_at: import_v416.z.number(),
2682
+ error: import_v416.z.object({
2683
+ code: import_v416.z.string(),
2684
+ message: import_v416.z.string()
2607
2685
  }).nullish(),
2608
- model: import_v415.z.string(),
2609
- output: import_v415.z.array(
2610
- import_v415.z.discriminatedUnion("type", [
2611
- import_v415.z.object({
2612
- type: import_v415.z.literal("message"),
2613
- role: import_v415.z.literal("assistant"),
2614
- id: import_v415.z.string(),
2615
- content: import_v415.z.array(
2616
- import_v415.z.object({
2617
- type: import_v415.z.literal("output_text"),
2618
- text: import_v415.z.string(),
2686
+ model: import_v416.z.string(),
2687
+ output: import_v416.z.array(
2688
+ import_v416.z.discriminatedUnion("type", [
2689
+ import_v416.z.object({
2690
+ type: import_v416.z.literal("message"),
2691
+ role: import_v416.z.literal("assistant"),
2692
+ id: import_v416.z.string(),
2693
+ content: import_v416.z.array(
2694
+ import_v416.z.object({
2695
+ type: import_v416.z.literal("output_text"),
2696
+ text: import_v416.z.string(),
2619
2697
  logprobs: LOGPROBS_SCHEMA.nullish(),
2620
- annotations: import_v415.z.array(
2621
- import_v415.z.discriminatedUnion("type", [
2622
- import_v415.z.object({
2623
- type: import_v415.z.literal("url_citation"),
2624
- start_index: import_v415.z.number(),
2625
- end_index: import_v415.z.number(),
2626
- url: import_v415.z.string(),
2627
- title: import_v415.z.string()
2698
+ annotations: import_v416.z.array(
2699
+ import_v416.z.discriminatedUnion("type", [
2700
+ import_v416.z.object({
2701
+ type: import_v416.z.literal("url_citation"),
2702
+ start_index: import_v416.z.number(),
2703
+ end_index: import_v416.z.number(),
2704
+ url: import_v416.z.string(),
2705
+ title: import_v416.z.string()
2628
2706
  }),
2629
- import_v415.z.object({
2630
- type: import_v415.z.literal("file_citation"),
2631
- file_id: import_v415.z.string(),
2632
- filename: import_v415.z.string().nullish(),
2633
- index: import_v415.z.number().nullish(),
2634
- start_index: import_v415.z.number().nullish(),
2635
- end_index: import_v415.z.number().nullish(),
2636
- quote: import_v415.z.string().nullish()
2707
+ import_v416.z.object({
2708
+ type: import_v416.z.literal("file_citation"),
2709
+ file_id: import_v416.z.string(),
2710
+ filename: import_v416.z.string().nullish(),
2711
+ index: import_v416.z.number().nullish(),
2712
+ start_index: import_v416.z.number().nullish(),
2713
+ end_index: import_v416.z.number().nullish(),
2714
+ quote: import_v416.z.string().nullish()
2637
2715
  }),
2638
- import_v415.z.object({
2639
- type: import_v415.z.literal("container_file_citation")
2716
+ import_v416.z.object({
2717
+ type: import_v416.z.literal("container_file_citation")
2640
2718
  })
2641
2719
  ])
2642
2720
  )
@@ -2647,33 +2725,34 @@ var OpenAIResponsesLanguageModel = class {
2647
2725
  fileSearchCallItem,
2648
2726
  codeInterpreterCallItem,
2649
2727
  imageGenerationCallItem,
2650
- import_v415.z.object({
2651
- type: import_v415.z.literal("function_call"),
2652
- call_id: import_v415.z.string(),
2653
- name: import_v415.z.string(),
2654
- arguments: import_v415.z.string(),
2655
- id: import_v415.z.string()
2728
+ localShellCallItem,
2729
+ import_v416.z.object({
2730
+ type: import_v416.z.literal("function_call"),
2731
+ call_id: import_v416.z.string(),
2732
+ name: import_v416.z.string(),
2733
+ arguments: import_v416.z.string(),
2734
+ id: import_v416.z.string()
2656
2735
  }),
2657
- import_v415.z.object({
2658
- type: import_v415.z.literal("computer_call"),
2659
- id: import_v415.z.string(),
2660
- status: import_v415.z.string().optional()
2736
+ import_v416.z.object({
2737
+ type: import_v416.z.literal("computer_call"),
2738
+ id: import_v416.z.string(),
2739
+ status: import_v416.z.string().optional()
2661
2740
  }),
2662
- import_v415.z.object({
2663
- type: import_v415.z.literal("reasoning"),
2664
- id: import_v415.z.string(),
2665
- encrypted_content: import_v415.z.string().nullish(),
2666
- summary: import_v415.z.array(
2667
- import_v415.z.object({
2668
- type: import_v415.z.literal("summary_text"),
2669
- text: import_v415.z.string()
2741
+ import_v416.z.object({
2742
+ type: import_v416.z.literal("reasoning"),
2743
+ id: import_v416.z.string(),
2744
+ encrypted_content: import_v416.z.string().nullish(),
2745
+ summary: import_v416.z.array(
2746
+ import_v416.z.object({
2747
+ type: import_v416.z.literal("summary_text"),
2748
+ text: import_v416.z.string()
2670
2749
  })
2671
2750
  )
2672
2751
  })
2673
2752
  ])
2674
2753
  ),
2675
- service_tier: import_v415.z.string().nullish(),
2676
- incomplete_details: import_v415.z.object({ reason: import_v415.z.string() }).nullish(),
2754
+ service_tier: import_v416.z.string().nullish(),
2755
+ incomplete_details: import_v416.z.object({ reason: import_v416.z.string() }).nullish(),
2677
2756
  usage: usageSchema2
2678
2757
  })
2679
2758
  ),
@@ -2733,6 +2812,20 @@ var OpenAIResponsesLanguageModel = class {
  });
  break;
  }
+ case "local_shell_call": {
+ content.push({
+ type: "tool-call",
+ toolCallId: part.call_id,
+ toolName: "local_shell",
+ input: JSON.stringify({ action: part.action }),
+ providerMetadata: {
+ openai: {
+ itemId: part.id
+ }
+ }
+ });
+ break;
+ }
  case "message": {
  for (const contentPart of part.content) {
  if (((_c = (_b = options.providerOptions) == null ? void 0 : _b.openai) == null ? void 0 : _c.logprobs) && contentPart.logprobs) {
@@ -2752,7 +2845,7 @@ var OpenAIResponsesLanguageModel = class {
2752
2845
  content.push({
2753
2846
  type: "source",
2754
2847
  sourceType: "url",
2755
- id: (_f = (_e = (_d = this.config).generateId) == null ? void 0 : _e.call(_d)) != null ? _f : (0, import_provider_utils13.generateId)(),
2848
+ id: (_f = (_e = (_d = this.config).generateId) == null ? void 0 : _e.call(_d)) != null ? _f : (0, import_provider_utils14.generateId)(),
2756
2849
  url: annotation.url,
2757
2850
  title: annotation.title
2758
2851
  });
@@ -2760,7 +2853,7 @@ var OpenAIResponsesLanguageModel = class {
2760
2853
  content.push({
2761
2854
  type: "source",
2762
2855
  sourceType: "document",
2763
- id: (_i = (_h = (_g = this.config).generateId) == null ? void 0 : _h.call(_g)) != null ? _i : (0, import_provider_utils13.generateId)(),
2856
+ id: (_i = (_h = (_g = this.config).generateId) == null ? void 0 : _h.call(_g)) != null ? _i : (0, import_provider_utils14.generateId)(),
2764
2857
  mediaType: "text/plain",
2765
2858
  title: (_k = (_j = annotation.quote) != null ? _j : annotation.filename) != null ? _k : "Document",
2766
2859
  filename: (_l = annotation.filename) != null ? _l : annotation.file_id
@@ -2912,18 +3005,18 @@ var OpenAIResponsesLanguageModel = class {
2912
3005
  warnings,
2913
3006
  webSearchToolName
2914
3007
  } = await this.getArgs(options);
2915
- const { responseHeaders, value: response } = await (0, import_provider_utils13.postJsonToApi)({
3008
+ const { responseHeaders, value: response } = await (0, import_provider_utils14.postJsonToApi)({
2916
3009
  url: this.config.url({
2917
3010
  path: "/responses",
2918
3011
  modelId: this.modelId
2919
3012
  }),
2920
- headers: (0, import_provider_utils13.combineHeaders)(this.config.headers(), options.headers),
3013
+ headers: (0, import_provider_utils14.combineHeaders)(this.config.headers(), options.headers),
2921
3014
  body: {
2922
3015
  ...body,
2923
3016
  stream: true
2924
3017
  },
2925
3018
  failedResponseHandler: openaiFailedResponseHandler,
2926
- successfulResponseHandler: (0, import_provider_utils13.createEventSourceResponseHandler)(
3019
+ successfulResponseHandler: (0, import_provider_utils14.createEventSourceResponseHandler)(
2927
3020
  openaiResponsesChunkSchema
2928
3021
  ),
2929
3022
  abortSignal: options.abortSignal,
@@ -3151,6 +3244,26 @@ var OpenAIResponsesLanguageModel = class {
  },
  providerExecuted: true
  });
+ } else if (value.item.type === "local_shell_call") {
+ ongoingToolCalls[value.output_index] = void 0;
+ controller.enqueue({
+ type: "tool-call",
+ toolCallId: value.item.call_id,
+ toolName: "local_shell",
+ input: JSON.stringify({
+ action: {
+ type: "exec",
+ command: value.item.action.command,
+ timeoutMs: value.item.action.timeout_ms,
+ user: value.item.action.user,
+ workingDirectory: value.item.action.working_directory,
+ env: value.item.action.env
+ }
+ }),
+ providerMetadata: {
+ openai: { itemId: value.item.id }
+ }
+ });
  } else if (value.item.type === "message") {
  controller.enqueue({
  type: "text-end",
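For stream consumers, the branch above surfaces a local shell call as an ordinary `tool-call` stream part. A hedged sketch of what such a part looks like (ids and command invented; snake_case API fields are mapped back to camelCase inside the JSON `input` string):

  const toolCallStreamPart = {
    type: "tool-call",
    toolCallId: "call_abc123",
    toolName: "local_shell",
    // `input` is a JSON string matching localShellInputSchema
    input: JSON.stringify({
      action: { type: "exec", command: ["ls", "-la"], workingDirectory: "/tmp" }
    }),
    providerMetadata: { openai: { itemId: "lsh_abc123" } }
  };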
@@ -3277,7 +3390,7 @@ var OpenAIResponsesLanguageModel = class {
3277
3390
  controller.enqueue({
3278
3391
  type: "source",
3279
3392
  sourceType: "url",
3280
- id: (_q = (_p = (_o = self.config).generateId) == null ? void 0 : _p.call(_o)) != null ? _q : (0, import_provider_utils13.generateId)(),
3393
+ id: (_q = (_p = (_o = self.config).generateId) == null ? void 0 : _p.call(_o)) != null ? _q : (0, import_provider_utils14.generateId)(),
3281
3394
  url: value.annotation.url,
3282
3395
  title: value.annotation.title
3283
3396
  });
@@ -3285,7 +3398,7 @@ var OpenAIResponsesLanguageModel = class {
3285
3398
  controller.enqueue({
3286
3399
  type: "source",
3287
3400
  sourceType: "document",
3288
- id: (_t = (_s = (_r = self.config).generateId) == null ? void 0 : _s.call(_r)) != null ? _t : (0, import_provider_utils13.generateId)(),
3401
+ id: (_t = (_s = (_r = self.config).generateId) == null ? void 0 : _s.call(_r)) != null ? _t : (0, import_provider_utils14.generateId)(),
3289
3402
  mediaType: "text/plain",
3290
3403
  title: (_v = (_u = value.annotation.quote) != null ? _u : value.annotation.filename) != null ? _v : "Document",
3291
3404
  filename: (_w = value.annotation.filename) != null ? _w : value.annotation.file_id
@@ -3321,180 +3434,181 @@ var OpenAIResponsesLanguageModel = class {
3321
3434
  };
3322
3435
  }
3323
3436
  };
3324
- var usageSchema2 = import_v415.z.object({
3325
- input_tokens: import_v415.z.number(),
3326
- input_tokens_details: import_v415.z.object({ cached_tokens: import_v415.z.number().nullish() }).nullish(),
3327
- output_tokens: import_v415.z.number(),
3328
- output_tokens_details: import_v415.z.object({ reasoning_tokens: import_v415.z.number().nullish() }).nullish()
3437
+ var usageSchema2 = import_v416.z.object({
3438
+ input_tokens: import_v416.z.number(),
3439
+ input_tokens_details: import_v416.z.object({ cached_tokens: import_v416.z.number().nullish() }).nullish(),
3440
+ output_tokens: import_v416.z.number(),
3441
+ output_tokens_details: import_v416.z.object({ reasoning_tokens: import_v416.z.number().nullish() }).nullish()
3329
3442
  });
3330
- var textDeltaChunkSchema = import_v415.z.object({
3331
- type: import_v415.z.literal("response.output_text.delta"),
3332
- item_id: import_v415.z.string(),
3333
- delta: import_v415.z.string(),
3443
+ var textDeltaChunkSchema = import_v416.z.object({
3444
+ type: import_v416.z.literal("response.output_text.delta"),
3445
+ item_id: import_v416.z.string(),
3446
+ delta: import_v416.z.string(),
3334
3447
  logprobs: LOGPROBS_SCHEMA.nullish()
3335
3448
  });
3336
- var errorChunkSchema = import_v415.z.object({
3337
- type: import_v415.z.literal("error"),
3338
- code: import_v415.z.string(),
3339
- message: import_v415.z.string(),
3340
- param: import_v415.z.string().nullish(),
3341
- sequence_number: import_v415.z.number()
3449
+ var errorChunkSchema = import_v416.z.object({
3450
+ type: import_v416.z.literal("error"),
3451
+ code: import_v416.z.string(),
3452
+ message: import_v416.z.string(),
3453
+ param: import_v416.z.string().nullish(),
3454
+ sequence_number: import_v416.z.number()
3342
3455
  });
3343
- var responseFinishedChunkSchema = import_v415.z.object({
3344
- type: import_v415.z.enum(["response.completed", "response.incomplete"]),
3345
- response: import_v415.z.object({
3346
- incomplete_details: import_v415.z.object({ reason: import_v415.z.string() }).nullish(),
3456
+ var responseFinishedChunkSchema = import_v416.z.object({
3457
+ type: import_v416.z.enum(["response.completed", "response.incomplete"]),
3458
+ response: import_v416.z.object({
3459
+ incomplete_details: import_v416.z.object({ reason: import_v416.z.string() }).nullish(),
3347
3460
  usage: usageSchema2,
3348
- service_tier: import_v415.z.string().nullish()
3461
+ service_tier: import_v416.z.string().nullish()
3349
3462
  })
3350
3463
  });
3351
- var responseCreatedChunkSchema = import_v415.z.object({
3352
- type: import_v415.z.literal("response.created"),
3353
- response: import_v415.z.object({
3354
- id: import_v415.z.string(),
3355
- created_at: import_v415.z.number(),
3356
- model: import_v415.z.string(),
3357
- service_tier: import_v415.z.string().nullish()
3464
+ var responseCreatedChunkSchema = import_v416.z.object({
3465
+ type: import_v416.z.literal("response.created"),
3466
+ response: import_v416.z.object({
3467
+ id: import_v416.z.string(),
3468
+ created_at: import_v416.z.number(),
3469
+ model: import_v416.z.string(),
3470
+ service_tier: import_v416.z.string().nullish()
3358
3471
  })
3359
3472
  });
3360
- var responseOutputItemAddedSchema = import_v415.z.object({
3361
- type: import_v415.z.literal("response.output_item.added"),
3362
- output_index: import_v415.z.number(),
3363
- item: import_v415.z.discriminatedUnion("type", [
3364
- import_v415.z.object({
3365
- type: import_v415.z.literal("message"),
3366
- id: import_v415.z.string()
3473
+ var responseOutputItemAddedSchema = import_v416.z.object({
3474
+ type: import_v416.z.literal("response.output_item.added"),
3475
+ output_index: import_v416.z.number(),
3476
+ item: import_v416.z.discriminatedUnion("type", [
3477
+ import_v416.z.object({
3478
+ type: import_v416.z.literal("message"),
3479
+ id: import_v416.z.string()
3367
3480
  }),
3368
- import_v415.z.object({
3369
- type: import_v415.z.literal("reasoning"),
3370
- id: import_v415.z.string(),
3371
- encrypted_content: import_v415.z.string().nullish()
3481
+ import_v416.z.object({
3482
+ type: import_v416.z.literal("reasoning"),
3483
+ id: import_v416.z.string(),
3484
+ encrypted_content: import_v416.z.string().nullish()
3372
3485
  }),
3373
- import_v415.z.object({
3374
- type: import_v415.z.literal("function_call"),
3375
- id: import_v415.z.string(),
3376
- call_id: import_v415.z.string(),
3377
- name: import_v415.z.string(),
3378
- arguments: import_v415.z.string()
3486
+ import_v416.z.object({
3487
+ type: import_v416.z.literal("function_call"),
3488
+ id: import_v416.z.string(),
3489
+ call_id: import_v416.z.string(),
3490
+ name: import_v416.z.string(),
3491
+ arguments: import_v416.z.string()
3379
3492
  }),
3380
- import_v415.z.object({
3381
- type: import_v415.z.literal("web_search_call"),
3382
- id: import_v415.z.string(),
3383
- status: import_v415.z.string(),
3384
- action: import_v415.z.object({
3385
- type: import_v415.z.literal("search"),
3386
- query: import_v415.z.string().optional()
3493
+ import_v416.z.object({
3494
+ type: import_v416.z.literal("web_search_call"),
3495
+ id: import_v416.z.string(),
3496
+ status: import_v416.z.string(),
3497
+ action: import_v416.z.object({
3498
+ type: import_v416.z.literal("search"),
3499
+ query: import_v416.z.string().optional()
3387
3500
  }).nullish()
3388
3501
  }),
3389
- import_v415.z.object({
3390
- type: import_v415.z.literal("computer_call"),
3391
- id: import_v415.z.string(),
3392
- status: import_v415.z.string()
3502
+ import_v416.z.object({
3503
+ type: import_v416.z.literal("computer_call"),
3504
+ id: import_v416.z.string(),
3505
+ status: import_v416.z.string()
3393
3506
  }),
3394
- import_v415.z.object({
3395
- type: import_v415.z.literal("file_search_call"),
3396
- id: import_v415.z.string()
3507
+ import_v416.z.object({
3508
+ type: import_v416.z.literal("file_search_call"),
3509
+ id: import_v416.z.string()
3397
3510
  }),
3398
- import_v415.z.object({
3399
- type: import_v415.z.literal("image_generation_call"),
3400
- id: import_v415.z.string()
3511
+ import_v416.z.object({
3512
+ type: import_v416.z.literal("image_generation_call"),
3513
+ id: import_v416.z.string()
3401
3514
  }),
3402
- import_v415.z.object({
3403
- type: import_v415.z.literal("code_interpreter_call"),
3404
- id: import_v415.z.string(),
3405
- container_id: import_v415.z.string(),
3406
- code: import_v415.z.string().nullable(),
3407
- outputs: import_v415.z.array(
3408
- import_v415.z.discriminatedUnion("type", [
3409
- import_v415.z.object({ type: import_v415.z.literal("logs"), logs: import_v415.z.string() }),
3410
- import_v415.z.object({ type: import_v415.z.literal("image"), url: import_v415.z.string() })
3515
+ import_v416.z.object({
3516
+ type: import_v416.z.literal("code_interpreter_call"),
3517
+ id: import_v416.z.string(),
3518
+ container_id: import_v416.z.string(),
3519
+ code: import_v416.z.string().nullable(),
3520
+ outputs: import_v416.z.array(
3521
+ import_v416.z.discriminatedUnion("type", [
3522
+ import_v416.z.object({ type: import_v416.z.literal("logs"), logs: import_v416.z.string() }),
3523
+ import_v416.z.object({ type: import_v416.z.literal("image"), url: import_v416.z.string() })
3411
3524
  ])
3412
3525
  ).nullable(),
3413
- status: import_v415.z.string()
3526
+ status: import_v416.z.string()
3414
3527
  })
3415
3528
  ])
3416
3529
  });
3417
- var responseOutputItemDoneSchema = import_v415.z.object({
3418
- type: import_v415.z.literal("response.output_item.done"),
3419
- output_index: import_v415.z.number(),
3420
- item: import_v415.z.discriminatedUnion("type", [
3421
- import_v415.z.object({
3422
- type: import_v415.z.literal("message"),
3423
- id: import_v415.z.string()
3530
+ var responseOutputItemDoneSchema = import_v416.z.object({
3531
+ type: import_v416.z.literal("response.output_item.done"),
3532
+ output_index: import_v416.z.number(),
3533
+ item: import_v416.z.discriminatedUnion("type", [
3534
+ import_v416.z.object({
3535
+ type: import_v416.z.literal("message"),
3536
+ id: import_v416.z.string()
3424
3537
  }),
3425
- import_v415.z.object({
3426
- type: import_v415.z.literal("reasoning"),
3427
- id: import_v415.z.string(),
3428
- encrypted_content: import_v415.z.string().nullish()
3538
+ import_v416.z.object({
3539
+ type: import_v416.z.literal("reasoning"),
3540
+ id: import_v416.z.string(),
3541
+ encrypted_content: import_v416.z.string().nullish()
3429
3542
  }),
3430
- import_v415.z.object({
3431
- type: import_v415.z.literal("function_call"),
3432
- id: import_v415.z.string(),
3433
- call_id: import_v415.z.string(),
3434
- name: import_v415.z.string(),
3435
- arguments: import_v415.z.string(),
3436
- status: import_v415.z.literal("completed")
3543
+ import_v416.z.object({
3544
+ type: import_v416.z.literal("function_call"),
3545
+ id: import_v416.z.string(),
3546
+ call_id: import_v416.z.string(),
3547
+ name: import_v416.z.string(),
3548
+ arguments: import_v416.z.string(),
3549
+ status: import_v416.z.literal("completed")
3437
3550
  }),
3438
3551
  codeInterpreterCallItem,
3439
3552
  imageGenerationCallItem,
3440
3553
  webSearchCallItem,
3441
3554
  fileSearchCallItem,
3442
- import_v415.z.object({
3443
- type: import_v415.z.literal("computer_call"),
3444
- id: import_v415.z.string(),
3445
- status: import_v415.z.literal("completed")
3555
+ localShellCallItem,
3556
+ import_v416.z.object({
3557
+ type: import_v416.z.literal("computer_call"),
3558
+ id: import_v416.z.string(),
3559
+ status: import_v416.z.literal("completed")
3446
3560
  })
3447
3561
  ])
3448
3562
  });
3449
- var responseFunctionCallArgumentsDeltaSchema = import_v415.z.object({
3450
- type: import_v415.z.literal("response.function_call_arguments.delta"),
3451
- item_id: import_v415.z.string(),
3452
- output_index: import_v415.z.number(),
3453
- delta: import_v415.z.string()
3563
+ var responseFunctionCallArgumentsDeltaSchema = import_v416.z.object({
3564
+ type: import_v416.z.literal("response.function_call_arguments.delta"),
3565
+ item_id: import_v416.z.string(),
3566
+ output_index: import_v416.z.number(),
3567
+ delta: import_v416.z.string()
3454
3568
  });
3455
- var responseCodeInterpreterCallCodeDeltaSchema = import_v415.z.object({
3456
- type: import_v415.z.literal("response.code_interpreter_call_code.delta"),
3457
- item_id: import_v415.z.string(),
3458
- output_index: import_v415.z.number(),
3459
- delta: import_v415.z.string()
3569
+ var responseCodeInterpreterCallCodeDeltaSchema = import_v416.z.object({
3570
+ type: import_v416.z.literal("response.code_interpreter_call_code.delta"),
3571
+ item_id: import_v416.z.string(),
3572
+ output_index: import_v416.z.number(),
3573
+ delta: import_v416.z.string()
3460
3574
  });
3461
- var responseCodeInterpreterCallCodeDoneSchema = import_v415.z.object({
3462
- type: import_v415.z.literal("response.code_interpreter_call_code.done"),
3463
- item_id: import_v415.z.string(),
3464
- output_index: import_v415.z.number(),
3465
- code: import_v415.z.string()
3575
+ var responseCodeInterpreterCallCodeDoneSchema = import_v416.z.object({
3576
+ type: import_v416.z.literal("response.code_interpreter_call_code.done"),
3577
+ item_id: import_v416.z.string(),
3578
+ output_index: import_v416.z.number(),
3579
+ code: import_v416.z.string()
3466
3580
  });
3467
- var responseAnnotationAddedSchema = import_v415.z.object({
3468
- type: import_v415.z.literal("response.output_text.annotation.added"),
3469
- annotation: import_v415.z.discriminatedUnion("type", [
3470
- import_v415.z.object({
3471
- type: import_v415.z.literal("url_citation"),
3472
- url: import_v415.z.string(),
3473
- title: import_v415.z.string()
3581
+ var responseAnnotationAddedSchema = import_v416.z.object({
3582
+ type: import_v416.z.literal("response.output_text.annotation.added"),
3583
+ annotation: import_v416.z.discriminatedUnion("type", [
3584
+ import_v416.z.object({
3585
+ type: import_v416.z.literal("url_citation"),
3586
+ url: import_v416.z.string(),
3587
+ title: import_v416.z.string()
3474
3588
  }),
3475
- import_v415.z.object({
3476
- type: import_v415.z.literal("file_citation"),
3477
- file_id: import_v415.z.string(),
3478
- filename: import_v415.z.string().nullish(),
3479
- index: import_v415.z.number().nullish(),
3480
- start_index: import_v415.z.number().nullish(),
3481
- end_index: import_v415.z.number().nullish(),
3482
- quote: import_v415.z.string().nullish()
3589
+ import_v416.z.object({
3590
+ type: import_v416.z.literal("file_citation"),
3591
+ file_id: import_v416.z.string(),
3592
+ filename: import_v416.z.string().nullish(),
3593
+ index: import_v416.z.number().nullish(),
3594
+ start_index: import_v416.z.number().nullish(),
3595
+ end_index: import_v416.z.number().nullish(),
3596
+ quote: import_v416.z.string().nullish()
3483
3597
  })
3484
3598
  ])
3485
3599
  });
3486
- var responseReasoningSummaryPartAddedSchema = import_v415.z.object({
3487
- type: import_v415.z.literal("response.reasoning_summary_part.added"),
3488
- item_id: import_v415.z.string(),
3489
- summary_index: import_v415.z.number()
3600
+ var responseReasoningSummaryPartAddedSchema = import_v416.z.object({
3601
+ type: import_v416.z.literal("response.reasoning_summary_part.added"),
3602
+ item_id: import_v416.z.string(),
3603
+ summary_index: import_v416.z.number()
3490
3604
  });
3491
- var responseReasoningSummaryTextDeltaSchema = import_v415.z.object({
3492
- type: import_v415.z.literal("response.reasoning_summary_text.delta"),
3493
- item_id: import_v415.z.string(),
3494
- summary_index: import_v415.z.number(),
3495
- delta: import_v415.z.string()
3605
+ var responseReasoningSummaryTextDeltaSchema = import_v416.z.object({
3606
+ type: import_v416.z.literal("response.reasoning_summary_text.delta"),
3607
+ item_id: import_v416.z.string(),
3608
+ summary_index: import_v416.z.number(),
3609
+ delta: import_v416.z.string()
3496
3610
  });
3497
- var openaiResponsesChunkSchema = import_v415.z.union([
3611
+ var openaiResponsesChunkSchema = import_v416.z.union([
3498
3612
  textDeltaChunkSchema,
3499
3613
  responseFinishedChunkSchema,
3500
3614
  responseCreatedChunkSchema,
@@ -3507,7 +3621,7 @@ var openaiResponsesChunkSchema = import_v415.z.union([
3507
3621
  responseReasoningSummaryPartAddedSchema,
3508
3622
  responseReasoningSummaryTextDeltaSchema,
3509
3623
  errorChunkSchema,
3510
- import_v415.z.object({ type: import_v415.z.string() }).loose()
3624
+ import_v416.z.object({ type: import_v416.z.string() }).loose()
3511
3625
  // fallback for unknown chunks
3512
3626
  ]);
3513
3627
  function isTextDeltaChunk(chunk) {
@@ -3586,15 +3700,15 @@ function getResponsesModelConfig(modelId) {
3586
3700
  isReasoningModel: false
3587
3701
  };
3588
3702
  }
3589
- var openaiResponsesProviderOptionsSchema = import_v415.z.object({
3590
- include: import_v415.z.array(
3591
- import_v415.z.enum([
3703
+ var openaiResponsesProviderOptionsSchema = import_v416.z.object({
3704
+ include: import_v416.z.array(
3705
+ import_v416.z.enum([
3592
3706
  "reasoning.encrypted_content",
3593
3707
  "file_search_call.results",
3594
3708
  "message.output_text.logprobs"
3595
3709
  ])
3596
3710
  ).nullish(),
3597
- instructions: import_v415.z.string().nullish(),
3711
+ instructions: import_v416.z.string().nullish(),
3598
3712
  /**
3599
3713
  * Return the log probabilities of the tokens.
3600
3714
  *
@@ -3607,33 +3721,33 @@ var openaiResponsesProviderOptionsSchema = import_v415.z.object({
3607
3721
  * @see https://platform.openai.com/docs/api-reference/responses/create
3608
3722
  * @see https://cookbook.openai.com/examples/using_logprobs
3609
3723
  */
3610
- logprobs: import_v415.z.union([import_v415.z.boolean(), import_v415.z.number().min(1).max(TOP_LOGPROBS_MAX)]).optional(),
3724
+ logprobs: import_v416.z.union([import_v416.z.boolean(), import_v416.z.number().min(1).max(TOP_LOGPROBS_MAX)]).optional(),
3611
3725
  /**
3612
3726
  * The maximum number of total calls to built-in tools that can be processed in a response.
3613
3727
  * This maximum number applies across all built-in tool calls, not per individual tool.
3614
3728
  * Any further attempts to call a tool by the model will be ignored.
3615
3729
  */
3616
- maxToolCalls: import_v415.z.number().nullish(),
3617
- metadata: import_v415.z.any().nullish(),
3618
- parallelToolCalls: import_v415.z.boolean().nullish(),
3619
- previousResponseId: import_v415.z.string().nullish(),
3620
- promptCacheKey: import_v415.z.string().nullish(),
3621
- reasoningEffort: import_v415.z.string().nullish(),
3622
- reasoningSummary: import_v415.z.string().nullish(),
3623
- safetyIdentifier: import_v415.z.string().nullish(),
3624
- serviceTier: import_v415.z.enum(["auto", "flex", "priority"]).nullish(),
3625
- store: import_v415.z.boolean().nullish(),
3626
- strictJsonSchema: import_v415.z.boolean().nullish(),
3627
- textVerbosity: import_v415.z.enum(["low", "medium", "high"]).nullish(),
3628
- user: import_v415.z.string().nullish()
3730
+ maxToolCalls: import_v416.z.number().nullish(),
3731
+ metadata: import_v416.z.any().nullish(),
3732
+ parallelToolCalls: import_v416.z.boolean().nullish(),
3733
+ previousResponseId: import_v416.z.string().nullish(),
3734
+ promptCacheKey: import_v416.z.string().nullish(),
3735
+ reasoningEffort: import_v416.z.string().nullish(),
3736
+ reasoningSummary: import_v416.z.string().nullish(),
3737
+ safetyIdentifier: import_v416.z.string().nullish(),
3738
+ serviceTier: import_v416.z.enum(["auto", "flex", "priority"]).nullish(),
3739
+ store: import_v416.z.boolean().nullish(),
3740
+ strictJsonSchema: import_v416.z.boolean().nullish(),
3741
+ textVerbosity: import_v416.z.enum(["low", "medium", "high"]).nullish(),
3742
+ user: import_v416.z.string().nullish()
3629
3743
  });
3630
3744
 
3631
3745
  // src/speech/openai-speech-model.ts
3632
- var import_provider_utils14 = require("@ai-sdk/provider-utils");
3633
- var import_v416 = require("zod/v4");
3634
- var OpenAIProviderOptionsSchema = import_v416.z.object({
3635
- instructions: import_v416.z.string().nullish(),
3636
- speed: import_v416.z.number().min(0.25).max(4).default(1).nullish()
3746
+ var import_provider_utils15 = require("@ai-sdk/provider-utils");
3747
+ var import_v417 = require("zod/v4");
3748
+ var OpenAIProviderOptionsSchema = import_v417.z.object({
3749
+ instructions: import_v417.z.string().nullish(),
3750
+ speed: import_v417.z.number().min(0.25).max(4).default(1).nullish()
3637
3751
  });
3638
3752
  var OpenAISpeechModel = class {
3639
3753
  constructor(modelId, config) {
@@ -3654,7 +3768,7 @@ var OpenAISpeechModel = class {
3654
3768
  providerOptions
3655
3769
  }) {
3656
3770
  const warnings = [];
3657
- const openAIOptions = await (0, import_provider_utils14.parseProviderOptions)({
3771
+ const openAIOptions = await (0, import_provider_utils15.parseProviderOptions)({
3658
3772
  provider: "openai",
3659
3773
  providerOptions,
3660
3774
  schema: OpenAIProviderOptionsSchema
@@ -3707,15 +3821,15 @@ var OpenAISpeechModel = class {
3707
3821
  value: audio,
3708
3822
  responseHeaders,
3709
3823
  rawValue: rawResponse
3710
- } = await (0, import_provider_utils14.postJsonToApi)({
3824
+ } = await (0, import_provider_utils15.postJsonToApi)({
3711
3825
  url: this.config.url({
3712
3826
  path: "/audio/speech",
3713
3827
  modelId: this.modelId
3714
3828
  }),
3715
- headers: (0, import_provider_utils14.combineHeaders)(this.config.headers(), options.headers),
3829
+ headers: (0, import_provider_utils15.combineHeaders)(this.config.headers(), options.headers),
3716
3830
  body: requestBody,
3717
3831
  failedResponseHandler: openaiFailedResponseHandler,
3718
- successfulResponseHandler: (0, import_provider_utils14.createBinaryResponseHandler)(),
3832
+ successfulResponseHandler: (0, import_provider_utils15.createBinaryResponseHandler)(),
3719
3833
  abortSignal: options.abortSignal,
3720
3834
  fetch: this.config.fetch
3721
3835
  });
@@ -3736,34 +3850,34 @@ var OpenAISpeechModel = class {
3736
3850
  };
3737
3851
 
3738
3852
  // src/transcription/openai-transcription-model.ts
3739
- var import_provider_utils15 = require("@ai-sdk/provider-utils");
3740
- var import_v418 = require("zod/v4");
3853
+ var import_provider_utils16 = require("@ai-sdk/provider-utils");
3854
+ var import_v419 = require("zod/v4");
3741
3855
 
3742
3856
  // src/transcription/openai-transcription-options.ts
3743
- var import_v417 = require("zod/v4");
3744
- var openAITranscriptionProviderOptions = import_v417.z.object({
3857
+ var import_v418 = require("zod/v4");
3858
+ var openAITranscriptionProviderOptions = import_v418.z.object({
3745
3859
  /**
3746
3860
  * Additional information to include in the transcription response.
3747
3861
  */
3748
- include: import_v417.z.array(import_v417.z.string()).optional(),
3862
+ include: import_v418.z.array(import_v418.z.string()).optional(),
3749
3863
  /**
3750
3864
  * The language of the input audio in ISO-639-1 format.
3751
3865
  */
3752
- language: import_v417.z.string().optional(),
3866
+ language: import_v418.z.string().optional(),
3753
3867
  /**
3754
3868
  * An optional text to guide the model's style or continue a previous audio segment.
3755
3869
  */
3756
- prompt: import_v417.z.string().optional(),
3870
+ prompt: import_v418.z.string().optional(),
3757
3871
  /**
3758
3872
  * The sampling temperature, between 0 and 1.
3759
3873
  * @default 0
3760
3874
  */
3761
- temperature: import_v417.z.number().min(0).max(1).default(0).optional(),
3875
+ temperature: import_v418.z.number().min(0).max(1).default(0).optional(),
3762
3876
  /**
3763
3877
  * The timestamp granularities to populate for this transcription.
3764
3878
  * @default ['segment']
3765
3879
  */
3766
- timestampGranularities: import_v417.z.array(import_v417.z.enum(["word", "segment"])).default(["segment"]).optional()
3880
+ timestampGranularities: import_v418.z.array(import_v418.z.enum(["word", "segment"])).default(["segment"]).optional()
3767
3881
  });
3768
3882
 
3769
3883
  // src/transcription/openai-transcription-model.ts
@@ -3841,15 +3955,15 @@ var OpenAITranscriptionModel = class {
3841
3955
  providerOptions
3842
3956
  }) {
3843
3957
  const warnings = [];
3844
- const openAIOptions = await (0, import_provider_utils15.parseProviderOptions)({
3958
+ const openAIOptions = await (0, import_provider_utils16.parseProviderOptions)({
3845
3959
  provider: "openai",
3846
3960
  providerOptions,
3847
3961
  schema: openAITranscriptionProviderOptions
3848
3962
  });
3849
3963
  const formData = new FormData();
3850
- const blob = audio instanceof Uint8Array ? new Blob([audio]) : new Blob([(0, import_provider_utils15.convertBase64ToUint8Array)(audio)]);
3964
+ const blob = audio instanceof Uint8Array ? new Blob([audio]) : new Blob([(0, import_provider_utils16.convertBase64ToUint8Array)(audio)]);
3851
3965
  formData.append("model", this.modelId);
3852
- const fileExtension = (0, import_provider_utils15.mediaTypeToExtension)(mediaType);
3966
+ const fileExtension = (0, import_provider_utils16.mediaTypeToExtension)(mediaType);
3853
3967
  formData.append(
3854
3968
  "file",
3855
3969
  new File([blob], "audio", { type: mediaType }),
@@ -3894,15 +4008,15 @@ var OpenAITranscriptionModel = class {
3894
4008
  value: response,
3895
4009
  responseHeaders,
3896
4010
  rawValue: rawResponse
3897
- } = await (0, import_provider_utils15.postFormDataToApi)({
4011
+ } = await (0, import_provider_utils16.postFormDataToApi)({
3898
4012
  url: this.config.url({
3899
4013
  path: "/audio/transcriptions",
3900
4014
  modelId: this.modelId
3901
4015
  }),
3902
- headers: (0, import_provider_utils15.combineHeaders)(this.config.headers(), options.headers),
4016
+ headers: (0, import_provider_utils16.combineHeaders)(this.config.headers(), options.headers),
3903
4017
  formData,
3904
4018
  failedResponseHandler: openaiFailedResponseHandler,
3905
- successfulResponseHandler: (0, import_provider_utils15.createJsonResponseHandler)(
4019
+ successfulResponseHandler: (0, import_provider_utils16.createJsonResponseHandler)(
3906
4020
  openaiTranscriptionResponseSchema
3907
4021
  ),
3908
4022
  abortSignal: options.abortSignal,
@@ -3932,44 +4046,44 @@ var OpenAITranscriptionModel = class {
3932
4046
  };
3933
4047
  }
3934
4048
  };
3935
- var openaiTranscriptionResponseSchema = import_v418.z.object({
3936
- text: import_v418.z.string(),
3937
- language: import_v418.z.string().nullish(),
3938
- duration: import_v418.z.number().nullish(),
3939
- words: import_v418.z.array(
3940
- import_v418.z.object({
3941
- word: import_v418.z.string(),
3942
- start: import_v418.z.number(),
3943
- end: import_v418.z.number()
4049
+ var openaiTranscriptionResponseSchema = import_v419.z.object({
4050
+ text: import_v419.z.string(),
4051
+ language: import_v419.z.string().nullish(),
4052
+ duration: import_v419.z.number().nullish(),
4053
+ words: import_v419.z.array(
4054
+ import_v419.z.object({
4055
+ word: import_v419.z.string(),
4056
+ start: import_v419.z.number(),
4057
+ end: import_v419.z.number()
3944
4058
  })
3945
4059
  ).nullish(),
3946
- segments: import_v418.z.array(
3947
- import_v418.z.object({
3948
- id: import_v418.z.number(),
3949
- seek: import_v418.z.number(),
3950
- start: import_v418.z.number(),
3951
- end: import_v418.z.number(),
3952
- text: import_v418.z.string(),
3953
- tokens: import_v418.z.array(import_v418.z.number()),
3954
- temperature: import_v418.z.number(),
3955
- avg_logprob: import_v418.z.number(),
3956
- compression_ratio: import_v418.z.number(),
3957
- no_speech_prob: import_v418.z.number()
4060
+ segments: import_v419.z.array(
4061
+ import_v419.z.object({
4062
+ id: import_v419.z.number(),
4063
+ seek: import_v419.z.number(),
4064
+ start: import_v419.z.number(),
4065
+ end: import_v419.z.number(),
4066
+ text: import_v419.z.string(),
4067
+ tokens: import_v419.z.array(import_v419.z.number()),
4068
+ temperature: import_v419.z.number(),
4069
+ avg_logprob: import_v419.z.number(),
4070
+ compression_ratio: import_v419.z.number(),
4071
+ no_speech_prob: import_v419.z.number()
3958
4072
  })
3959
4073
  ).nullish()
3960
4074
  });
 
  // src/version.ts
- var VERSION = true ? "2.0.39" : "0.0.0-test";
+ var VERSION = true ? "2.0.40" : "0.0.0-test";
 
  // src/openai-provider.ts
  function createOpenAI(options = {}) {
  var _a, _b;
- const baseURL = (_a = (0, import_provider_utils16.withoutTrailingSlash)(options.baseURL)) != null ? _a : "https://api.openai.com/v1";
+ const baseURL = (_a = (0, import_provider_utils17.withoutTrailingSlash)(options.baseURL)) != null ? _a : "https://api.openai.com/v1";
  const providerName = (_b = options.name) != null ? _b : "openai";
- const getHeaders = () => (0, import_provider_utils16.withUserAgentSuffix)(
+ const getHeaders = () => (0, import_provider_utils17.withUserAgentSuffix)(
  {
- Authorization: `Bearer ${(0, import_provider_utils16.loadApiKey)({
+ Authorization: `Bearer ${(0, import_provider_utils17.loadApiKey)({
  apiKey: options.apiKey,
  environmentVariableName: "OPENAI_API_KEY",
  description: "OpenAI"