@ai-sdk/openai 2.1.0-beta.6 → 2.1.0-beta.8

This diff compares the contents of publicly available package versions as published to a supported registry. It is provided for informational purposes only and reflects the packages as they appear in their public registries.
package/dist/index.js CHANGED
@@ -27,7 +27,7 @@ __export(src_exports, {
  module.exports = __toCommonJS(src_exports);
 
  // src/openai-provider.ts
- var import_provider_utils16 = require("@ai-sdk/provider-utils");
+ var import_provider_utils17 = require("@ai-sdk/provider-utils");
 
  // src/chat/openai-chat-language-model.ts
  var import_provider3 = require("@ai-sdk/provider");
@@ -415,7 +415,7 @@ function prepareChatTools({
  // src/chat/openai-chat-language-model.ts
  var OpenAIChatLanguageModel = class {
  constructor(modelId, config) {
- this.specificationVersion = "v2";
+ this.specificationVersion = "v3";
  this.supportedUrls = {
  "image/*": [/^https?:\/\/.*$/]
  };
@@ -1224,7 +1224,7 @@ var openaiCompletionProviderOptions = import_v44.z.object({
  // src/completion/openai-completion-language-model.ts
  var OpenAICompletionLanguageModel = class {
  constructor(modelId, config) {
- this.specificationVersion = "v2";
+ this.specificationVersion = "v3";
  this.supportedUrls = {
  // No URLs are supported for completion models.
  };
@@ -1783,39 +1825,62 @@ var imageGeneration = (args = {}) => {
  return imageGenerationToolFactory(args);
  };
 
- // src/tool/web-search.ts
+ // src/tool/local-shell.ts
  var import_provider_utils10 = require("@ai-sdk/provider-utils");
  var import_v412 = require("zod/v4");
- var webSearchArgsSchema = import_v412.z.object({
- filters: import_v412.z.object({
- allowedDomains: import_v412.z.array(import_v412.z.string()).optional()
+ var localShellInputSchema = import_v412.z.object({
+ action: import_v412.z.object({
+ type: import_v412.z.literal("exec"),
+ command: import_v412.z.array(import_v412.z.string()),
+ timeoutMs: import_v412.z.number().optional(),
+ user: import_v412.z.string().optional(),
+ workingDirectory: import_v412.z.string().optional(),
+ env: import_v412.z.record(import_v412.z.string(), import_v412.z.string()).optional()
+ })
+ });
+ var localShellOutputSchema = import_v412.z.object({
+ output: import_v412.z.string()
+ });
+ var localShell = (0, import_provider_utils10.createProviderDefinedToolFactoryWithOutputSchema)({
+ id: "openai.local_shell",
+ name: "local_shell",
+ inputSchema: localShellInputSchema,
+ outputSchema: localShellOutputSchema
+ });
+
+ // src/tool/web-search.ts
+ var import_provider_utils11 = require("@ai-sdk/provider-utils");
+ var import_v413 = require("zod/v4");
+ var webSearchArgsSchema = import_v413.z.object({
+ filters: import_v413.z.object({
+ allowedDomains: import_v413.z.array(import_v413.z.string()).optional()
  }).optional(),
- searchContextSize: import_v412.z.enum(["low", "medium", "high"]).optional(),
- userLocation: import_v412.z.object({
- type: import_v412.z.literal("approximate"),
- country: import_v412.z.string().optional(),
- city: import_v412.z.string().optional(),
- region: import_v412.z.string().optional(),
- timezone: import_v412.z.string().optional()
+ searchContextSize: import_v413.z.enum(["low", "medium", "high"]).optional(),
+ userLocation: import_v413.z.object({
+ type: import_v413.z.literal("approximate"),
+ country: import_v413.z.string().optional(),
+ city: import_v413.z.string().optional(),
+ region: import_v413.z.string().optional(),
+ timezone: import_v413.z.string().optional()
  }).optional()
  });
- var webSearchToolFactory = (0, import_provider_utils10.createProviderDefinedToolFactory)({
+ var webSearchToolFactory = (0, import_provider_utils11.createProviderDefinedToolFactory)({
  id: "openai.web_search",
  name: "web_search",
- inputSchema: import_v412.z.object({
- action: import_v412.z.discriminatedUnion("type", [
- import_v412.z.object({
- type: import_v412.z.literal("search"),
- query: import_v412.z.string().nullish()
+ inputSchema: import_v413.z.object({
+ action: import_v413.z.discriminatedUnion("type", [
+ import_v413.z.object({
+ type: import_v413.z.literal("search"),
+ query: import_v413.z.string().nullish()
  }),
- import_v412.z.object({
- type: import_v412.z.literal("open_page"),
- url: import_v412.z.string()
+ import_v413.z.object({
+ type: import_v413.z.literal("open_page"),
+ url: import_v413.z.string()
  }),
- import_v412.z.object({
- type: import_v412.z.literal("find"),
- url: import_v412.z.string(),
- pattern: import_v412.z.string()
+ import_v413.z.object({
+ type: import_v413.z.literal("find"),
+ url: import_v413.z.string(),
+ pattern: import_v413.z.string()
  })
  ]).nullish()
  })
@@ -1825,59 +1848,59 @@ var webSearch = (args = {}) => {
  };
 
  // src/tool/web-search-preview.ts
- var import_provider_utils11 = require("@ai-sdk/provider-utils");
- var import_v413 = require("zod/v4");
- var webSearchPreviewArgsSchema = import_v413.z.object({
+ var import_provider_utils12 = require("@ai-sdk/provider-utils");
+ var import_v414 = require("zod/v4");
+ var webSearchPreviewArgsSchema = import_v414.z.object({
  /**
  * Search context size to use for the web search.
  * - high: Most comprehensive context, highest cost, slower response
  * - medium: Balanced context, cost, and latency (default)
  * - low: Least context, lowest cost, fastest response
  */
- searchContextSize: import_v413.z.enum(["low", "medium", "high"]).optional(),
+ searchContextSize: import_v414.z.enum(["low", "medium", "high"]).optional(),
  /**
  * User location information to provide geographically relevant search results.
  */
- userLocation: import_v413.z.object({
+ userLocation: import_v414.z.object({
  /**
  * Type of location (always 'approximate')
  */
- type: import_v413.z.literal("approximate"),
+ type: import_v414.z.literal("approximate"),
  /**
  * Two-letter ISO country code (e.g., 'US', 'GB')
  */
- country: import_v413.z.string().optional(),
+ country: import_v414.z.string().optional(),
  /**
  * City name (free text, e.g., 'Minneapolis')
  */
- city: import_v413.z.string().optional(),
+ city: import_v414.z.string().optional(),
  /**
  * Region name (free text, e.g., 'Minnesota')
  */
- region: import_v413.z.string().optional(),
+ region: import_v414.z.string().optional(),
  /**
  * IANA timezone (e.g., 'America/Chicago')
  */
- timezone: import_v413.z.string().optional()
+ timezone: import_v414.z.string().optional()
  }).optional()
  });
- var webSearchPreview = (0, import_provider_utils11.createProviderDefinedToolFactory)({
+ var webSearchPreview = (0, import_provider_utils12.createProviderDefinedToolFactory)({
  id: "openai.web_search_preview",
  name: "web_search_preview",
- inputSchema: import_v413.z.object({
- action: import_v413.z.discriminatedUnion("type", [
- import_v413.z.object({
- type: import_v413.z.literal("search"),
- query: import_v413.z.string().nullish()
+ inputSchema: import_v414.z.object({
+ action: import_v414.z.discriminatedUnion("type", [
+ import_v414.z.object({
+ type: import_v414.z.literal("search"),
+ query: import_v414.z.string().nullish()
  }),
- import_v413.z.object({
- type: import_v413.z.literal("open_page"),
- url: import_v413.z.string()
+ import_v414.z.object({
+ type: import_v414.z.literal("open_page"),
+ url: import_v414.z.string()
  }),
- import_v413.z.object({
- type: import_v413.z.literal("find"),
- url: import_v413.z.string(),
- pattern: import_v413.z.string()
+ import_v414.z.object({
+ type: import_v414.z.literal("find"),
+ url: import_v414.z.string(),
+ pattern: import_v414.z.string()
  })
  ]).nullish()
  })
@@ -1922,6 +1945,15 @@ var openaiTools = {
  * @param background - Transparent or opaque
  */
  imageGeneration,
+ /**
+ * Local shell is a tool that allows agents to run shell commands locally
+ * on a machine you or the user provides.
+ *
+ * Supported models: `gpt-5-codex` and `codex-mini-latest`
+ *
+ * Must have name `local_shell`.
+ */
+ localShell,
  /**
  * Web search allows models to access up-to-date information from the internet
  * and provide answers with sourced citations.
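Illustrative usage sketch (not part of the published diff): the `localShell` tool registered above is client-executed, so the caller runs the command it receives and returns `{ output }` matching `localShellOutputSchema`. The `generateText` wiring, the `execute` handler shape, and the shell invocation below are assumptions based on how other client-executed AI SDK tools are typically hooked up; only the tool name `local_shell` and the model ids come from the doc comment in this hunk.

```ts
// Hypothetical wiring for the new localShell tool (sketch only, not from the diff).
import { openai } from '@ai-sdk/openai';
import { generateText } from 'ai';
import { exec } from 'node:child_process';
import { promisify } from 'node:util';

const execAsync = promisify(exec);

const result = await generateText({
  // per the doc comment above: gpt-5-codex or codex-mini-latest
  model: openai.responses('gpt-5-codex'),
  prompt: 'List the files in the current working directory.',
  tools: {
    // the tool must be registered under the name `local_shell`
    local_shell: openai.tools.localShell({
      // assumption: the factory accepts an execute handler like other
      // client-executed tools; the input matches localShellInputSchema
      execute: async ({ action }) => {
        const { stdout, stderr } = await execAsync(action.command.join(' '), {
          cwd: action.workingDirectory,
          timeout: action.timeoutMs,
        });
        // the return value must match localShellOutputSchema: { output: string }
        return { output: stdout + stderr };
      },
    }),
  },
});
```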
@@ -1949,13 +1981,13 @@ var openaiTools = {
 
  // src/responses/openai-responses-language-model.ts
  var import_provider8 = require("@ai-sdk/provider");
- var import_provider_utils13 = require("@ai-sdk/provider-utils");
- var import_v415 = require("zod/v4");
+ var import_provider_utils14 = require("@ai-sdk/provider-utils");
+ var import_v416 = require("zod/v4");
 
  // src/responses/convert-to-openai-responses-input.ts
  var import_provider6 = require("@ai-sdk/provider");
- var import_provider_utils12 = require("@ai-sdk/provider-utils");
- var import_v414 = require("zod/v4");
+ var import_provider_utils13 = require("@ai-sdk/provider-utils");
+ var import_v415 = require("zod/v4");
  function isFileId(data, prefixes) {
  if (!prefixes) return false;
  return prefixes.some((prefix) => data.startsWith(prefix));
@@ -1964,9 +1996,10 @@ async function convertToOpenAIResponsesInput({
  prompt,
  systemMessageMode,
  fileIdPrefixes,
- store
+ store,
+ hasLocalShellTool = false
  }) {
- var _a, _b, _c, _d, _e, _f;
+ var _a, _b, _c, _d, _e, _f, _g, _h, _i;
  const input = [];
  const warnings = [];
  for (const { role, content } of prompt) {
@@ -2012,7 +2045,7 @@ async function convertToOpenAIResponsesInput({
  return {
  type: "input_image",
  ...part.data instanceof URL ? { image_url: part.data.toString() } : typeof part.data === "string" && isFileId(part.data, fileIdPrefixes) ? { file_id: part.data } : {
- image_url: `data:${mediaType};base64,${(0, import_provider_utils12.convertToBase64)(part.data)}`
+ image_url: `data:${mediaType};base64,${(0, import_provider_utils13.convertToBase64)(part.data)}`
  },
  detail: (_b2 = (_a2 = part.providerOptions) == null ? void 0 : _a2.openai) == null ? void 0 : _b2.imageDetail
  };
@@ -2027,7 +2060,7 @@ async function convertToOpenAIResponsesInput({
  type: "input_file",
  ...typeof part.data === "string" && isFileId(part.data, fileIdPrefixes) ? { file_id: part.data } : {
  filename: (_c2 = part.filename) != null ? _c2 : `part-${index}.pdf`,
- file_data: `data:application/pdf;base64,${(0, import_provider_utils12.convertToBase64)(part.data)}`
+ file_data: `data:application/pdf;base64,${(0, import_provider_utils13.convertToBase64)(part.data)}`
  }
  };
  } else {
@@ -2059,12 +2092,29 @@ async function convertToOpenAIResponsesInput({
  if (part.providerExecuted) {
  break;
  }
+ if (hasLocalShellTool && part.toolName === "local_shell") {
+ const parsedInput = localShellInputSchema.parse(part.input);
+ input.push({
+ type: "local_shell_call",
+ call_id: part.toolCallId,
+ id: (_f = (_e = (_d = part.providerOptions) == null ? void 0 : _d.openai) == null ? void 0 : _e.itemId) != null ? _f : void 0,
+ action: {
+ type: "exec",
+ command: parsedInput.action.command,
+ timeout_ms: parsedInput.action.timeoutMs,
+ user: parsedInput.action.user,
+ working_directory: parsedInput.action.workingDirectory,
+ env: parsedInput.action.env
+ }
+ });
+ break;
+ }
  input.push({
  type: "function_call",
  call_id: part.toolCallId,
  name: part.toolName,
  arguments: JSON.stringify(part.input),
- id: (_f = (_e = (_d = part.providerOptions) == null ? void 0 : _d.openai) == null ? void 0 : _e.itemId) != null ? _f : void 0
+ id: (_i = (_h = (_g = part.providerOptions) == null ? void 0 : _g.openai) == null ? void 0 : _h.itemId) != null ? _i : void 0
  });
  break;
  }
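For orientation only (nothing below is in the diff): the branch above re-encodes the SDK's camelCase `local_shell` tool-call input as the snake_case `local_shell_call` item expected by the Responses API. A hypothetical example of the two shapes, with made-up ids and values:

```ts
// What the SDK stores as the tool-call input (camelCase, per localShellInputSchema):
const toolCallInput = {
  action: {
    type: 'exec',
    command: ['ls', '-la'],
    timeoutMs: 5000,
    workingDirectory: '/tmp',
  },
};

// Roughly what convertToOpenAIResponsesInput pushes for it (snake_case wire format):
const localShellCallItem = {
  type: 'local_shell_call',
  call_id: 'call_123', // part.toolCallId (hypothetical id)
  action: {
    type: 'exec',
    command: ['ls', '-la'],
    timeout_ms: 5000,
    working_directory: '/tmp',
  },
};
```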
@@ -2081,7 +2131,7 @@ async function convertToOpenAIResponsesInput({
  break;
  }
  case "reasoning": {
- const providerOptions = await (0, import_provider_utils12.parseProviderOptions)({
+ const providerOptions = await (0, import_provider_utils13.parseProviderOptions)({
  provider: "openai",
  providerOptions: part.providerOptions,
  schema: openaiResponsesReasoningProviderOptionsSchema
@@ -2138,6 +2188,14 @@ async function convertToOpenAIResponsesInput({
  case "tool": {
  for (const part of content) {
  const output = part.output;
+ if (hasLocalShellTool && part.toolName === "local_shell" && output.type === "json") {
+ input.push({
+ type: "local_shell_call_output",
+ call_id: part.toolCallId,
+ output: localShellOutputSchema.parse(output.value).output
+ });
+ break;
+ }
  let contentValue;
  switch (output.type) {
  case "text":
@@ -2166,9 +2224,9 @@ async function convertToOpenAIResponsesInput({
  }
  return { input, warnings };
  }
- var openaiResponsesReasoningProviderOptionsSchema = import_v414.z.object({
- itemId: import_v414.z.string().nullish(),
- reasoningEncryptedContent: import_v414.z.string().nullish()
+ var openaiResponsesReasoningProviderOptionsSchema = import_v415.z.object({
+ itemId: import_v415.z.string().nullish(),
+ reasoningEncryptedContent: import_v415.z.string().nullish()
  });
 
  // src/responses/map-openai-responses-finish-reason.ts
@@ -2229,6 +2287,12 @@ function prepareResponsesTools({
  });
  break;
  }
+ case "openai.local_shell": {
+ openaiTools2.push({
+ type: "local_shell"
+ });
+ break;
+ }
  case "openai.web_search_preview": {
  const args = webSearchPreviewArgsSchema.parse(tool.args);
  openaiTools2.push({
@@ -2308,73 +2372,86 @@ function prepareResponsesTools({
  }
 
  // src/responses/openai-responses-language-model.ts
- var webSearchCallItem = import_v415.z.object({
- type: import_v415.z.literal("web_search_call"),
- id: import_v415.z.string(),
- status: import_v415.z.string(),
- action: import_v415.z.discriminatedUnion("type", [
- import_v415.z.object({
- type: import_v415.z.literal("search"),
- query: import_v415.z.string().nullish()
+ var webSearchCallItem = import_v416.z.object({
+ type: import_v416.z.literal("web_search_call"),
+ id: import_v416.z.string(),
+ status: import_v416.z.string(),
+ action: import_v416.z.discriminatedUnion("type", [
+ import_v416.z.object({
+ type: import_v416.z.literal("search"),
+ query: import_v416.z.string().nullish()
  }),
- import_v415.z.object({
- type: import_v415.z.literal("open_page"),
- url: import_v415.z.string()
+ import_v416.z.object({
+ type: import_v416.z.literal("open_page"),
+ url: import_v416.z.string()
  }),
- import_v415.z.object({
- type: import_v415.z.literal("find"),
- url: import_v415.z.string(),
- pattern: import_v415.z.string()
+ import_v416.z.object({
+ type: import_v416.z.literal("find"),
+ url: import_v416.z.string(),
+ pattern: import_v416.z.string()
  })
  ]).nullish()
  });
- var fileSearchCallItem = import_v415.z.object({
- type: import_v415.z.literal("file_search_call"),
- id: import_v415.z.string(),
- queries: import_v415.z.array(import_v415.z.string()),
- results: import_v415.z.array(
- import_v415.z.object({
- attributes: import_v415.z.record(import_v415.z.string(), import_v415.z.unknown()),
- file_id: import_v415.z.string(),
- filename: import_v415.z.string(),
- score: import_v415.z.number(),
- text: import_v415.z.string()
+ var fileSearchCallItem = import_v416.z.object({
+ type: import_v416.z.literal("file_search_call"),
+ id: import_v416.z.string(),
+ queries: import_v416.z.array(import_v416.z.string()),
+ results: import_v416.z.array(
+ import_v416.z.object({
+ attributes: import_v416.z.record(import_v416.z.string(), import_v416.z.unknown()),
+ file_id: import_v416.z.string(),
+ filename: import_v416.z.string(),
+ score: import_v416.z.number(),
+ text: import_v416.z.string()
  })
  ).nullish()
  });
- var codeInterpreterCallItem = import_v415.z.object({
- type: import_v415.z.literal("code_interpreter_call"),
- id: import_v415.z.string(),
- code: import_v415.z.string().nullable(),
- container_id: import_v415.z.string(),
- outputs: import_v415.z.array(
- import_v415.z.discriminatedUnion("type", [
- import_v415.z.object({ type: import_v415.z.literal("logs"), logs: import_v415.z.string() }),
- import_v415.z.object({ type: import_v415.z.literal("image"), url: import_v415.z.string() })
+ var codeInterpreterCallItem = import_v416.z.object({
+ type: import_v416.z.literal("code_interpreter_call"),
+ id: import_v416.z.string(),
+ code: import_v416.z.string().nullable(),
+ container_id: import_v416.z.string(),
+ outputs: import_v416.z.array(
+ import_v416.z.discriminatedUnion("type", [
+ import_v416.z.object({ type: import_v416.z.literal("logs"), logs: import_v416.z.string() }),
+ import_v416.z.object({ type: import_v416.z.literal("image"), url: import_v416.z.string() })
  ])
  ).nullable()
  });
- var imageGenerationCallItem = import_v415.z.object({
- type: import_v415.z.literal("image_generation_call"),
- id: import_v415.z.string(),
- result: import_v415.z.string()
+ var localShellCallItem = import_v416.z.object({
+ type: import_v416.z.literal("local_shell_call"),
+ id: import_v416.z.string(),
+ call_id: import_v416.z.string(),
+ action: import_v416.z.object({
+ type: import_v416.z.literal("exec"),
+ command: import_v416.z.array(import_v416.z.string()),
+ timeout_ms: import_v416.z.number().optional(),
+ user: import_v416.z.string().optional(),
+ working_directory: import_v416.z.string().optional(),
+ env: import_v416.z.record(import_v416.z.string(), import_v416.z.string()).optional()
+ })
+ });
+ var imageGenerationCallItem = import_v416.z.object({
+ type: import_v416.z.literal("image_generation_call"),
+ id: import_v416.z.string(),
+ result: import_v416.z.string()
  });
  var TOP_LOGPROBS_MAX = 20;
- var LOGPROBS_SCHEMA = import_v415.z.array(
- import_v415.z.object({
- token: import_v415.z.string(),
- logprob: import_v415.z.number(),
- top_logprobs: import_v415.z.array(
- import_v415.z.object({
- token: import_v415.z.string(),
- logprob: import_v415.z.number()
+ var LOGPROBS_SCHEMA = import_v416.z.array(
+ import_v416.z.object({
+ token: import_v416.z.string(),
+ logprob: import_v416.z.number(),
+ top_logprobs: import_v416.z.array(
+ import_v416.z.object({
+ token: import_v416.z.string(),
+ logprob: import_v416.z.number()
  })
  )
  })
  );
  var OpenAIResponsesLanguageModel = class {
  constructor(modelId, config) {
- this.specificationVersion = "v2";
+ this.specificationVersion = "v3";
  this.supportedUrls = {
  "image/*": [/^https?:\/\/.*$/],
  "application/pdf": [/^https?:\/\/.*$/]
@@ -2424,7 +2501,7 @@ var OpenAIResponsesLanguageModel = class {
  if (stopSequences != null) {
  warnings.push({ type: "unsupported-setting", setting: "stopSequences" });
  }
- const openaiOptions = await (0, import_provider_utils13.parseProviderOptions)({
+ const openaiOptions = await (0, import_provider_utils14.parseProviderOptions)({
  provider: "openai",
  providerOptions,
  schema: openaiResponsesProviderOptionsSchema
@@ -2433,7 +2510,8 @@ var OpenAIResponsesLanguageModel = class {
  prompt,
  systemMessageMode: modelConfig.systemMessageMode,
  fileIdPrefixes: this.config.fileIdPrefixes,
- store: (_a = openaiOptions == null ? void 0 : openaiOptions.store) != null ? _a : true
+ store: (_a = openaiOptions == null ? void 0 : openaiOptions.store) != null ? _a : true,
+ hasLocalShellTool: hasOpenAITool("openai.local_shell")
  });
  warnings.push(...inputWarnings);
  const strictJsonSchema = (_b = openaiOptions == null ? void 0 : openaiOptions.strictJsonSchema) != null ? _b : false;
@@ -2592,51 +2670,51 @@ var OpenAIResponsesLanguageModel = class {
  responseHeaders,
  value: response,
  rawValue: rawResponse
- } = await (0, import_provider_utils13.postJsonToApi)({
+ } = await (0, import_provider_utils14.postJsonToApi)({
  url,
- headers: (0, import_provider_utils13.combineHeaders)(this.config.headers(), options.headers),
+ headers: (0, import_provider_utils14.combineHeaders)(this.config.headers(), options.headers),
  body,
  failedResponseHandler: openaiFailedResponseHandler,
- successfulResponseHandler: (0, import_provider_utils13.createJsonResponseHandler)(
- import_v415.z.object({
- id: import_v415.z.string(),
- created_at: import_v415.z.number(),
- error: import_v415.z.object({
- code: import_v415.z.string(),
- message: import_v415.z.string()
+ successfulResponseHandler: (0, import_provider_utils14.createJsonResponseHandler)(
+ import_v416.z.object({
+ id: import_v416.z.string(),
+ created_at: import_v416.z.number(),
+ error: import_v416.z.object({
+ code: import_v416.z.string(),
+ message: import_v416.z.string()
  }).nullish(),
- model: import_v415.z.string(),
- output: import_v415.z.array(
- import_v415.z.discriminatedUnion("type", [
- import_v415.z.object({
- type: import_v415.z.literal("message"),
- role: import_v415.z.literal("assistant"),
- id: import_v415.z.string(),
- content: import_v415.z.array(
- import_v415.z.object({
- type: import_v415.z.literal("output_text"),
- text: import_v415.z.string(),
+ model: import_v416.z.string(),
+ output: import_v416.z.array(
+ import_v416.z.discriminatedUnion("type", [
+ import_v416.z.object({
+ type: import_v416.z.literal("message"),
+ role: import_v416.z.literal("assistant"),
+ id: import_v416.z.string(),
+ content: import_v416.z.array(
+ import_v416.z.object({
+ type: import_v416.z.literal("output_text"),
+ text: import_v416.z.string(),
  logprobs: LOGPROBS_SCHEMA.nullish(),
- annotations: import_v415.z.array(
- import_v415.z.discriminatedUnion("type", [
- import_v415.z.object({
- type: import_v415.z.literal("url_citation"),
- start_index: import_v415.z.number(),
- end_index: import_v415.z.number(),
- url: import_v415.z.string(),
- title: import_v415.z.string()
+ annotations: import_v416.z.array(
+ import_v416.z.discriminatedUnion("type", [
+ import_v416.z.object({
+ type: import_v416.z.literal("url_citation"),
+ start_index: import_v416.z.number(),
+ end_index: import_v416.z.number(),
+ url: import_v416.z.string(),
+ title: import_v416.z.string()
  }),
- import_v415.z.object({
- type: import_v415.z.literal("file_citation"),
- file_id: import_v415.z.string(),
- filename: import_v415.z.string().nullish(),
- index: import_v415.z.number().nullish(),
- start_index: import_v415.z.number().nullish(),
- end_index: import_v415.z.number().nullish(),
- quote: import_v415.z.string().nullish()
+ import_v416.z.object({
+ type: import_v416.z.literal("file_citation"),
+ file_id: import_v416.z.string(),
+ filename: import_v416.z.string().nullish(),
+ index: import_v416.z.number().nullish(),
+ start_index: import_v416.z.number().nullish(),
+ end_index: import_v416.z.number().nullish(),
+ quote: import_v416.z.string().nullish()
  }),
- import_v415.z.object({
- type: import_v415.z.literal("container_file_citation")
+ import_v416.z.object({
+ type: import_v416.z.literal("container_file_citation")
  })
  ])
  )
@@ -2647,33 +2725,34 @@ var OpenAIResponsesLanguageModel = class {
  fileSearchCallItem,
  codeInterpreterCallItem,
  imageGenerationCallItem,
- import_v415.z.object({
- type: import_v415.z.literal("function_call"),
- call_id: import_v415.z.string(),
- name: import_v415.z.string(),
- arguments: import_v415.z.string(),
- id: import_v415.z.string()
+ localShellCallItem,
+ import_v416.z.object({
+ type: import_v416.z.literal("function_call"),
+ call_id: import_v416.z.string(),
+ name: import_v416.z.string(),
+ arguments: import_v416.z.string(),
+ id: import_v416.z.string()
  }),
- import_v415.z.object({
- type: import_v415.z.literal("computer_call"),
- id: import_v415.z.string(),
- status: import_v415.z.string().optional()
+ import_v416.z.object({
+ type: import_v416.z.literal("computer_call"),
+ id: import_v416.z.string(),
+ status: import_v416.z.string().optional()
  }),
- import_v415.z.object({
- type: import_v415.z.literal("reasoning"),
- id: import_v415.z.string(),
- encrypted_content: import_v415.z.string().nullish(),
- summary: import_v415.z.array(
- import_v415.z.object({
- type: import_v415.z.literal("summary_text"),
- text: import_v415.z.string()
+ import_v416.z.object({
+ type: import_v416.z.literal("reasoning"),
+ id: import_v416.z.string(),
+ encrypted_content: import_v416.z.string().nullish(),
+ summary: import_v416.z.array(
+ import_v416.z.object({
+ type: import_v416.z.literal("summary_text"),
+ text: import_v416.z.string()
  })
  )
  })
  ])
  ),
- service_tier: import_v415.z.string().nullish(),
- incomplete_details: import_v415.z.object({ reason: import_v415.z.string() }).nullish(),
+ service_tier: import_v416.z.string().nullish(),
+ incomplete_details: import_v416.z.object({ reason: import_v416.z.string() }).nullish(),
  usage: usageSchema2
  })
  ),
@@ -2733,6 +2812,20 @@ var OpenAIResponsesLanguageModel = class {
  });
  break;
  }
+ case "local_shell_call": {
+ content.push({
+ type: "tool-call",
+ toolCallId: part.call_id,
+ toolName: "local_shell",
+ input: JSON.stringify({ action: part.action }),
+ providerMetadata: {
+ openai: {
+ itemId: part.id
+ }
+ }
+ });
+ break;
+ }
  case "message": {
  for (const contentPart of part.content) {
  if (((_c = (_b = options.providerOptions) == null ? void 0 : _b.openai) == null ? void 0 : _c.logprobs) && contentPart.logprobs) {
@@ -2752,7 +2845,7 @@ var OpenAIResponsesLanguageModel = class {
  content.push({
  type: "source",
  sourceType: "url",
- id: (_f = (_e = (_d = this.config).generateId) == null ? void 0 : _e.call(_d)) != null ? _f : (0, import_provider_utils13.generateId)(),
+ id: (_f = (_e = (_d = this.config).generateId) == null ? void 0 : _e.call(_d)) != null ? _f : (0, import_provider_utils14.generateId)(),
  url: annotation.url,
  title: annotation.title
  });
@@ -2760,7 +2853,7 @@ var OpenAIResponsesLanguageModel = class {
  content.push({
  type: "source",
  sourceType: "document",
- id: (_i = (_h = (_g = this.config).generateId) == null ? void 0 : _h.call(_g)) != null ? _i : (0, import_provider_utils13.generateId)(),
+ id: (_i = (_h = (_g = this.config).generateId) == null ? void 0 : _h.call(_g)) != null ? _i : (0, import_provider_utils14.generateId)(),
  mediaType: "text/plain",
  title: (_k = (_j = annotation.quote) != null ? _j : annotation.filename) != null ? _k : "Document",
  filename: (_l = annotation.filename) != null ? _l : annotation.file_id
@@ -2912,18 +3005,18 @@ var OpenAIResponsesLanguageModel = class {
  warnings,
  webSearchToolName
  } = await this.getArgs(options);
- const { responseHeaders, value: response } = await (0, import_provider_utils13.postJsonToApi)({
+ const { responseHeaders, value: response } = await (0, import_provider_utils14.postJsonToApi)({
  url: this.config.url({
  path: "/responses",
  modelId: this.modelId
  }),
- headers: (0, import_provider_utils13.combineHeaders)(this.config.headers(), options.headers),
+ headers: (0, import_provider_utils14.combineHeaders)(this.config.headers(), options.headers),
  body: {
  ...body,
  stream: true
  },
  failedResponseHandler: openaiFailedResponseHandler,
- successfulResponseHandler: (0, import_provider_utils13.createEventSourceResponseHandler)(
+ successfulResponseHandler: (0, import_provider_utils14.createEventSourceResponseHandler)(
  openaiResponsesChunkSchema
  ),
  abortSignal: options.abortSignal,
@@ -2990,6 +3083,24 @@ var OpenAIResponsesLanguageModel = class {
  id: value.item.id,
  toolName: "computer_use"
  });
+ } else if (value.item.type === "code_interpreter_call") {
+ ongoingToolCalls[value.output_index] = {
+ toolName: "code_interpreter",
+ toolCallId: value.item.id,
+ codeInterpreter: {
+ containerId: value.item.container_id
+ }
+ };
+ controller.enqueue({
+ type: "tool-input-start",
+ id: value.item.id,
+ toolName: "code_interpreter"
+ });
+ controller.enqueue({
+ type: "tool-input-delta",
+ id: value.item.id,
+ delta: `{"containerId":"${value.item.container_id}","code":"`
+ });
  } else if (value.item.type === "file_search_call") {
  controller.enqueue({
  type: "tool-call",
@@ -3113,16 +3224,7 @@ var OpenAIResponsesLanguageModel = class {
  providerExecuted: true
  });
  } else if (value.item.type === "code_interpreter_call") {
- controller.enqueue({
- type: "tool-call",
- toolCallId: value.item.id,
- toolName: "code_interpreter",
- input: JSON.stringify({
- code: value.item.code,
- containerId: value.item.container_id
- }),
- providerExecuted: true
- });
+ ongoingToolCalls[value.output_index] = void 0;
  controller.enqueue({
  type: "tool-result",
  toolCallId: value.item.id,
@@ -3142,6 +3244,26 @@ var OpenAIResponsesLanguageModel = class {
  },
  providerExecuted: true
  });
+ } else if (value.item.type === "local_shell_call") {
+ ongoingToolCalls[value.output_index] = void 0;
+ controller.enqueue({
+ type: "tool-call",
+ toolCallId: value.item.call_id,
+ toolName: "local_shell",
+ input: JSON.stringify({
+ action: {
+ type: "exec",
+ command: value.item.action.command,
+ timeoutMs: value.item.action.timeout_ms,
+ user: value.item.action.user,
+ workingDirectory: value.item.action.working_directory,
+ env: value.item.action.env
+ }
+ }),
+ providerMetadata: {
+ openai: { itemId: value.item.id }
+ }
+ });
  } else if (value.item.type === "message") {
  controller.enqueue({
  type: "text-end",
@@ -3172,6 +3294,40 @@ var OpenAIResponsesLanguageModel = class {
  delta: value.delta
  });
  }
+ } else if (isResponseCodeInterpreterCallCodeDeltaChunk(value)) {
+ const toolCall = ongoingToolCalls[value.output_index];
+ if (toolCall != null) {
+ controller.enqueue({
+ type: "tool-input-delta",
+ id: toolCall.toolCallId,
+ // The delta is code, which is embedding in a JSON string.
+ // To escape it, we use JSON.stringify and slice to remove the outer quotes.
+ delta: JSON.stringify(value.delta).slice(1, -1)
+ });
+ }
+ } else if (isResponseCodeInterpreterCallCodeDoneChunk(value)) {
+ const toolCall = ongoingToolCalls[value.output_index];
+ if (toolCall != null) {
+ controller.enqueue({
+ type: "tool-input-delta",
+ id: toolCall.toolCallId,
+ delta: '"}'
+ });
+ controller.enqueue({
+ type: "tool-input-end",
+ id: toolCall.toolCallId
+ });
+ controller.enqueue({
+ type: "tool-call",
+ toolCallId: toolCall.toolCallId,
+ toolName: "code_interpreter",
+ input: JSON.stringify({
+ code: value.code,
+ containerId: toolCall.codeInterpreter.containerId
+ }),
+ providerExecuted: true
+ });
+ }
  } else if (isResponseCreatedChunk(value)) {
  responseId = value.response.id;
  controller.enqueue({
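Aside (not part of the diff): the streaming handlers above assemble the code interpreter tool input as incremental JSON text. The `response.output_item.added` handler opens it with `{"containerId":"...","code":"`, each `response.code_interpreter_call_code.delta` is escaped via `JSON.stringify(...).slice(1, -1)`, and the `.done` handler appends the closing `"}`, so the concatenated deltas parse to the same object the final `tool-call` part carries. A small sketch of that escaping, with a made-up container id:

```ts
// Demonstrates why the concatenated tool-input deltas form valid JSON (sketch only).
const containerId = 'cntr_abc'; // hypothetical
const codeDeltas = ['print("hi")\n', 'print("bye")\n'];

let buffered = `{"containerId":"${containerId}","code":"`; // opening delta
for (const delta of codeDeltas) {
  // JSON.stringify escapes quotes/newlines; slice(1, -1) drops the outer quotes
  buffered += JSON.stringify(delta).slice(1, -1);
}
buffered += '"}'; // closing delta emitted on response.code_interpreter_call_code.done

console.log(JSON.parse(buffered));
// -> { containerId: 'cntr_abc', code: 'print("hi")\nprint("bye")\n' }
```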
@@ -3234,7 +3390,7 @@ var OpenAIResponsesLanguageModel = class {
  controller.enqueue({
  type: "source",
  sourceType: "url",
- id: (_q = (_p = (_o = self.config).generateId) == null ? void 0 : _p.call(_o)) != null ? _q : (0, import_provider_utils13.generateId)(),
+ id: (_q = (_p = (_o = self.config).generateId) == null ? void 0 : _p.call(_o)) != null ? _q : (0, import_provider_utils14.generateId)(),
  url: value.annotation.url,
  title: value.annotation.title
  });
@@ -3242,7 +3398,7 @@ var OpenAIResponsesLanguageModel = class {
  controller.enqueue({
  type: "source",
  sourceType: "document",
- id: (_t = (_s = (_r = self.config).generateId) == null ? void 0 : _s.call(_r)) != null ? _t : (0, import_provider_utils13.generateId)(),
+ id: (_t = (_s = (_r = self.config).generateId) == null ? void 0 : _s.call(_r)) != null ? _t : (0, import_provider_utils14.generateId)(),
  mediaType: "text/plain",
  title: (_v = (_u = value.annotation.quote) != null ? _u : value.annotation.filename) != null ? _v : "Document",
  filename: (_w = value.annotation.filename) != null ? _w : value.annotation.file_id
@@ -3278,166 +3434,194 @@ var OpenAIResponsesLanguageModel = class {
  };
  }
  };
- var usageSchema2 = import_v415.z.object({
- input_tokens: import_v415.z.number(),
- input_tokens_details: import_v415.z.object({ cached_tokens: import_v415.z.number().nullish() }).nullish(),
- output_tokens: import_v415.z.number(),
- output_tokens_details: import_v415.z.object({ reasoning_tokens: import_v415.z.number().nullish() }).nullish()
+ var usageSchema2 = import_v416.z.object({
+ input_tokens: import_v416.z.number(),
+ input_tokens_details: import_v416.z.object({ cached_tokens: import_v416.z.number().nullish() }).nullish(),
+ output_tokens: import_v416.z.number(),
+ output_tokens_details: import_v416.z.object({ reasoning_tokens: import_v416.z.number().nullish() }).nullish()
  });
- var textDeltaChunkSchema = import_v415.z.object({
- type: import_v415.z.literal("response.output_text.delta"),
- item_id: import_v415.z.string(),
- delta: import_v415.z.string(),
+ var textDeltaChunkSchema = import_v416.z.object({
+ type: import_v416.z.literal("response.output_text.delta"),
+ item_id: import_v416.z.string(),
+ delta: import_v416.z.string(),
  logprobs: LOGPROBS_SCHEMA.nullish()
  });
- var errorChunkSchema = import_v415.z.object({
- type: import_v415.z.literal("error"),
- code: import_v415.z.string(),
- message: import_v415.z.string(),
- param: import_v415.z.string().nullish(),
- sequence_number: import_v415.z.number()
+ var errorChunkSchema = import_v416.z.object({
+ type: import_v416.z.literal("error"),
+ code: import_v416.z.string(),
+ message: import_v416.z.string(),
+ param: import_v416.z.string().nullish(),
+ sequence_number: import_v416.z.number()
  });
- var responseFinishedChunkSchema = import_v415.z.object({
- type: import_v415.z.enum(["response.completed", "response.incomplete"]),
- response: import_v415.z.object({
- incomplete_details: import_v415.z.object({ reason: import_v415.z.string() }).nullish(),
+ var responseFinishedChunkSchema = import_v416.z.object({
+ type: import_v416.z.enum(["response.completed", "response.incomplete"]),
+ response: import_v416.z.object({
+ incomplete_details: import_v416.z.object({ reason: import_v416.z.string() }).nullish(),
  usage: usageSchema2,
- service_tier: import_v415.z.string().nullish()
+ service_tier: import_v416.z.string().nullish()
  })
  });
- var responseCreatedChunkSchema = import_v415.z.object({
- type: import_v415.z.literal("response.created"),
- response: import_v415.z.object({
- id: import_v415.z.string(),
- created_at: import_v415.z.number(),
- model: import_v415.z.string(),
- service_tier: import_v415.z.string().nullish()
+ var responseCreatedChunkSchema = import_v416.z.object({
+ type: import_v416.z.literal("response.created"),
+ response: import_v416.z.object({
+ id: import_v416.z.string(),
+ created_at: import_v416.z.number(),
+ model: import_v416.z.string(),
+ service_tier: import_v416.z.string().nullish()
  })
  });
- var responseOutputItemAddedSchema = import_v415.z.object({
- type: import_v415.z.literal("response.output_item.added"),
- output_index: import_v415.z.number(),
- item: import_v415.z.discriminatedUnion("type", [
- import_v415.z.object({
- type: import_v415.z.literal("message"),
- id: import_v415.z.string()
+ var responseOutputItemAddedSchema = import_v416.z.object({
+ type: import_v416.z.literal("response.output_item.added"),
+ output_index: import_v416.z.number(),
+ item: import_v416.z.discriminatedUnion("type", [
+ import_v416.z.object({
+ type: import_v416.z.literal("message"),
+ id: import_v416.z.string()
  }),
- import_v415.z.object({
- type: import_v415.z.literal("reasoning"),
- id: import_v415.z.string(),
- encrypted_content: import_v415.z.string().nullish()
+ import_v416.z.object({
+ type: import_v416.z.literal("reasoning"),
+ id: import_v416.z.string(),
+ encrypted_content: import_v416.z.string().nullish()
  }),
- import_v415.z.object({
- type: import_v415.z.literal("function_call"),
- id: import_v415.z.string(),
- call_id: import_v415.z.string(),
- name: import_v415.z.string(),
- arguments: import_v415.z.string()
+ import_v416.z.object({
+ type: import_v416.z.literal("function_call"),
+ id: import_v416.z.string(),
+ call_id: import_v416.z.string(),
+ name: import_v416.z.string(),
+ arguments: import_v416.z.string()
  }),
- import_v415.z.object({
- type: import_v415.z.literal("web_search_call"),
- id: import_v415.z.string(),
- status: import_v415.z.string(),
- action: import_v415.z.object({
- type: import_v415.z.literal("search"),
- query: import_v415.z.string().optional()
+ import_v416.z.object({
+ type: import_v416.z.literal("web_search_call"),
+ id: import_v416.z.string(),
+ status: import_v416.z.string(),
+ action: import_v416.z.object({
+ type: import_v416.z.literal("search"),
+ query: import_v416.z.string().optional()
  }).nullish()
  }),
- import_v415.z.object({
- type: import_v415.z.literal("computer_call"),
- id: import_v415.z.string(),
- status: import_v415.z.string()
+ import_v416.z.object({
+ type: import_v416.z.literal("computer_call"),
+ id: import_v416.z.string(),
+ status: import_v416.z.string()
  }),
- import_v415.z.object({
- type: import_v415.z.literal("file_search_call"),
- id: import_v415.z.string()
+ import_v416.z.object({
+ type: import_v416.z.literal("file_search_call"),
+ id: import_v416.z.string()
  }),
- import_v415.z.object({
- type: import_v415.z.literal("image_generation_call"),
- id: import_v415.z.string()
+ import_v416.z.object({
+ type: import_v416.z.literal("image_generation_call"),
+ id: import_v416.z.string()
+ }),
+ import_v416.z.object({
+ type: import_v416.z.literal("code_interpreter_call"),
+ id: import_v416.z.string(),
+ container_id: import_v416.z.string(),
+ code: import_v416.z.string().nullable(),
+ outputs: import_v416.z.array(
+ import_v416.z.discriminatedUnion("type", [
+ import_v416.z.object({ type: import_v416.z.literal("logs"), logs: import_v416.z.string() }),
+ import_v416.z.object({ type: import_v416.z.literal("image"), url: import_v416.z.string() })
+ ])
+ ).nullable(),
+ status: import_v416.z.string()
  })
  ])
  });
- var responseOutputItemDoneSchema = import_v415.z.object({
- type: import_v415.z.literal("response.output_item.done"),
- output_index: import_v415.z.number(),
- item: import_v415.z.discriminatedUnion("type", [
- import_v415.z.object({
- type: import_v415.z.literal("message"),
- id: import_v415.z.string()
+ var responseOutputItemDoneSchema = import_v416.z.object({
+ type: import_v416.z.literal("response.output_item.done"),
+ output_index: import_v416.z.number(),
+ item: import_v416.z.discriminatedUnion("type", [
+ import_v416.z.object({
+ type: import_v416.z.literal("message"),
+ id: import_v416.z.string()
  }),
- import_v415.z.object({
- type: import_v415.z.literal("reasoning"),
- id: import_v415.z.string(),
- encrypted_content: import_v415.z.string().nullish()
+ import_v416.z.object({
+ type: import_v416.z.literal("reasoning"),
+ id: import_v416.z.string(),
+ encrypted_content: import_v416.z.string().nullish()
  }),
- import_v415.z.object({
- type: import_v415.z.literal("function_call"),
- id: import_v415.z.string(),
- call_id: import_v415.z.string(),
- name: import_v415.z.string(),
- arguments: import_v415.z.string(),
- status: import_v415.z.literal("completed")
+ import_v416.z.object({
+ type: import_v416.z.literal("function_call"),
+ id: import_v416.z.string(),
+ call_id: import_v416.z.string(),
+ name: import_v416.z.string(),
+ arguments: import_v416.z.string(),
+ status: import_v416.z.literal("completed")
  }),
  codeInterpreterCallItem,
  imageGenerationCallItem,
  webSearchCallItem,
  fileSearchCallItem,
- import_v415.z.object({
- type: import_v415.z.literal("computer_call"),
- id: import_v415.z.string(),
- status: import_v415.z.literal("completed")
+ localShellCallItem,
+ import_v416.z.object({
+ type: import_v416.z.literal("computer_call"),
+ id: import_v416.z.string(),
+ status: import_v416.z.literal("completed")
  })
  ])
  });
- var responseFunctionCallArgumentsDeltaSchema = import_v415.z.object({
- type: import_v415.z.literal("response.function_call_arguments.delta"),
- item_id: import_v415.z.string(),
- output_index: import_v415.z.number(),
- delta: import_v415.z.string()
+ var responseFunctionCallArgumentsDeltaSchema = import_v416.z.object({
+ type: import_v416.z.literal("response.function_call_arguments.delta"),
+ item_id: import_v416.z.string(),
+ output_index: import_v416.z.number(),
+ delta: import_v416.z.string()
+ });
+ var responseCodeInterpreterCallCodeDeltaSchema = import_v416.z.object({
+ type: import_v416.z.literal("response.code_interpreter_call_code.delta"),
+ item_id: import_v416.z.string(),
+ output_index: import_v416.z.number(),
+ delta: import_v416.z.string()
+ });
+ var responseCodeInterpreterCallCodeDoneSchema = import_v416.z.object({
+ type: import_v416.z.literal("response.code_interpreter_call_code.done"),
+ item_id: import_v416.z.string(),
+ output_index: import_v416.z.number(),
+ code: import_v416.z.string()
  });
- var responseAnnotationAddedSchema = import_v415.z.object({
- type: import_v415.z.literal("response.output_text.annotation.added"),
- annotation: import_v415.z.discriminatedUnion("type", [
- import_v415.z.object({
- type: import_v415.z.literal("url_citation"),
- url: import_v415.z.string(),
- title: import_v415.z.string()
+ var responseAnnotationAddedSchema = import_v416.z.object({
+ type: import_v416.z.literal("response.output_text.annotation.added"),
+ annotation: import_v416.z.discriminatedUnion("type", [
+ import_v416.z.object({
+ type: import_v416.z.literal("url_citation"),
+ url: import_v416.z.string(),
+ title: import_v416.z.string()
  }),
- import_v415.z.object({
- type: import_v415.z.literal("file_citation"),
- file_id: import_v415.z.string(),
- filename: import_v415.z.string().nullish(),
- index: import_v415.z.number().nullish(),
- start_index: import_v415.z.number().nullish(),
- end_index: import_v415.z.number().nullish(),
- quote: import_v415.z.string().nullish()
+ import_v416.z.object({
+ type: import_v416.z.literal("file_citation"),
+ file_id: import_v416.z.string(),
+ filename: import_v416.z.string().nullish(),
+ index: import_v416.z.number().nullish(),
+ start_index: import_v416.z.number().nullish(),
+ end_index: import_v416.z.number().nullish(),
+ quote: import_v416.z.string().nullish()
  })
  ])
  });
- var responseReasoningSummaryPartAddedSchema = import_v415.z.object({
- type: import_v415.z.literal("response.reasoning_summary_part.added"),
- item_id: import_v415.z.string(),
- summary_index: import_v415.z.number()
+ var responseReasoningSummaryPartAddedSchema = import_v416.z.object({
+ type: import_v416.z.literal("response.reasoning_summary_part.added"),
+ item_id: import_v416.z.string(),
+ summary_index: import_v416.z.number()
  });
- var responseReasoningSummaryTextDeltaSchema = import_v415.z.object({
- type: import_v415.z.literal("response.reasoning_summary_text.delta"),
- item_id: import_v415.z.string(),
- summary_index: import_v415.z.number(),
- delta: import_v415.z.string()
+ var responseReasoningSummaryTextDeltaSchema = import_v416.z.object({
+ type: import_v416.z.literal("response.reasoning_summary_text.delta"),
+ item_id: import_v416.z.string(),
+ summary_index: import_v416.z.number(),
+ delta: import_v416.z.string()
  });
- var openaiResponsesChunkSchema = import_v415.z.union([
+ var openaiResponsesChunkSchema = import_v416.z.union([
  textDeltaChunkSchema,
  responseFinishedChunkSchema,
  responseCreatedChunkSchema,
  responseOutputItemAddedSchema,
  responseOutputItemDoneSchema,
  responseFunctionCallArgumentsDeltaSchema,
+ responseCodeInterpreterCallCodeDeltaSchema,
+ responseCodeInterpreterCallCodeDoneSchema,
  responseAnnotationAddedSchema,
  responseReasoningSummaryPartAddedSchema,
  responseReasoningSummaryTextDeltaSchema,
  errorChunkSchema,
- import_v415.z.object({ type: import_v415.z.string() }).loose()
+ import_v416.z.object({ type: import_v416.z.string() }).loose()
  // fallback for unknown chunks
  ]);
  function isTextDeltaChunk(chunk) {
@@ -3458,6 +3642,12 @@ function isResponseCreatedChunk(chunk) {
  function isResponseFunctionCallArgumentsDeltaChunk(chunk) {
  return chunk.type === "response.function_call_arguments.delta";
  }
+ function isResponseCodeInterpreterCallCodeDeltaChunk(chunk) {
+ return chunk.type === "response.code_interpreter_call_code.delta";
+ }
+ function isResponseCodeInterpreterCallCodeDoneChunk(chunk) {
+ return chunk.type === "response.code_interpreter_call_code.done";
+ }
  function isResponseOutputItemAddedChunk(chunk) {
  return chunk.type === "response.output_item.added";
  }
@@ -3510,15 +3700,15 @@ function getResponsesModelConfig(modelId) {
  isReasoningModel: false
  };
  }
- var openaiResponsesProviderOptionsSchema = import_v415.z.object({
- include: import_v415.z.array(
- import_v415.z.enum([
+ var openaiResponsesProviderOptionsSchema = import_v416.z.object({
+ include: import_v416.z.array(
+ import_v416.z.enum([
  "reasoning.encrypted_content",
  "file_search_call.results",
  "message.output_text.logprobs"
  ])
  ).nullish(),
- instructions: import_v415.z.string().nullish(),
+ instructions: import_v416.z.string().nullish(),
  /**
  * Return the log probabilities of the tokens.
  *
@@ -3531,33 +3721,33 @@ var openaiResponsesProviderOptionsSchema = import_v415.z.object({
  * @see https://platform.openai.com/docs/api-reference/responses/create
  * @see https://cookbook.openai.com/examples/using_logprobs
  */
- logprobs: import_v415.z.union([import_v415.z.boolean(), import_v415.z.number().min(1).max(TOP_LOGPROBS_MAX)]).optional(),
+ logprobs: import_v416.z.union([import_v416.z.boolean(), import_v416.z.number().min(1).max(TOP_LOGPROBS_MAX)]).optional(),
  /**
  * The maximum number of total calls to built-in tools that can be processed in a response.
  * This maximum number applies across all built-in tool calls, not per individual tool.
  * Any further attempts to call a tool by the model will be ignored.
  */
- maxToolCalls: import_v415.z.number().nullish(),
- metadata: import_v415.z.any().nullish(),
- parallelToolCalls: import_v415.z.boolean().nullish(),
- previousResponseId: import_v415.z.string().nullish(),
- promptCacheKey: import_v415.z.string().nullish(),
- reasoningEffort: import_v415.z.string().nullish(),
- reasoningSummary: import_v415.z.string().nullish(),
- safetyIdentifier: import_v415.z.string().nullish(),
- serviceTier: import_v415.z.enum(["auto", "flex", "priority"]).nullish(),
- store: import_v415.z.boolean().nullish(),
- strictJsonSchema: import_v415.z.boolean().nullish(),
- textVerbosity: import_v415.z.enum(["low", "medium", "high"]).nullish(),
- user: import_v415.z.string().nullish()
+ maxToolCalls: import_v416.z.number().nullish(),
+ metadata: import_v416.z.any().nullish(),
+ parallelToolCalls: import_v416.z.boolean().nullish(),
+ previousResponseId: import_v416.z.string().nullish(),
+ promptCacheKey: import_v416.z.string().nullish(),
+ reasoningEffort: import_v416.z.string().nullish(),
+ reasoningSummary: import_v416.z.string().nullish(),
+ safetyIdentifier: import_v416.z.string().nullish(),
+ serviceTier: import_v416.z.enum(["auto", "flex", "priority"]).nullish(),
+ store: import_v416.z.boolean().nullish(),
+ strictJsonSchema: import_v416.z.boolean().nullish(),
+ textVerbosity: import_v416.z.enum(["low", "medium", "high"]).nullish(),
+ user: import_v416.z.string().nullish()
  });
 
  // src/speech/openai-speech-model.ts
- var import_provider_utils14 = require("@ai-sdk/provider-utils");
- var import_v416 = require("zod/v4");
- var OpenAIProviderOptionsSchema = import_v416.z.object({
- instructions: import_v416.z.string().nullish(),
- speed: import_v416.z.number().min(0.25).max(4).default(1).nullish()
+ var import_provider_utils15 = require("@ai-sdk/provider-utils");
+ var import_v417 = require("zod/v4");
+ var OpenAIProviderOptionsSchema = import_v417.z.object({
+ instructions: import_v417.z.string().nullish(),
+ speed: import_v417.z.number().min(0.25).max(4).default(1).nullish()
  });
  var OpenAISpeechModel = class {
  constructor(modelId, config) {
@@ -3578,7 +3768,7 @@ var OpenAISpeechModel = class {
  providerOptions
  }) {
  const warnings = [];
- const openAIOptions = await (0, import_provider_utils14.parseProviderOptions)({
+ const openAIOptions = await (0, import_provider_utils15.parseProviderOptions)({
  provider: "openai",
  providerOptions,
  schema: OpenAIProviderOptionsSchema
@@ -3631,15 +3821,15 @@ var OpenAISpeechModel = class {
  value: audio,
  responseHeaders,
  rawValue: rawResponse
- } = await (0, import_provider_utils14.postJsonToApi)({
+ } = await (0, import_provider_utils15.postJsonToApi)({
  url: this.config.url({
  path: "/audio/speech",
  modelId: this.modelId
  }),
- headers: (0, import_provider_utils14.combineHeaders)(this.config.headers(), options.headers),
+ headers: (0, import_provider_utils15.combineHeaders)(this.config.headers(), options.headers),
  body: requestBody,
  failedResponseHandler: openaiFailedResponseHandler,
- successfulResponseHandler: (0, import_provider_utils14.createBinaryResponseHandler)(),
+ successfulResponseHandler: (0, import_provider_utils15.createBinaryResponseHandler)(),
  abortSignal: options.abortSignal,
  fetch: this.config.fetch
  });
@@ -3660,34 +3850,34 @@ var OpenAISpeechModel = class {
  };
 
  // src/transcription/openai-transcription-model.ts
- var import_provider_utils15 = require("@ai-sdk/provider-utils");
- var import_v418 = require("zod/v4");
+ var import_provider_utils16 = require("@ai-sdk/provider-utils");
+ var import_v419 = require("zod/v4");
 
  // src/transcription/openai-transcription-options.ts
- var import_v417 = require("zod/v4");
- var openAITranscriptionProviderOptions = import_v417.z.object({
+ var import_v418 = require("zod/v4");
+ var openAITranscriptionProviderOptions = import_v418.z.object({
  /**
  * Additional information to include in the transcription response.
  */
- include: import_v417.z.array(import_v417.z.string()).optional(),
+ include: import_v418.z.array(import_v418.z.string()).optional(),
  /**
  * The language of the input audio in ISO-639-1 format.
  */
- language: import_v417.z.string().optional(),
+ language: import_v418.z.string().optional(),
  /**
  * An optional text to guide the model's style or continue a previous audio segment.
  */
- prompt: import_v417.z.string().optional(),
+ prompt: import_v418.z.string().optional(),
  /**
  * The sampling temperature, between 0 and 1.
  * @default 0
  */
- temperature: import_v417.z.number().min(0).max(1).default(0).optional(),
+ temperature: import_v418.z.number().min(0).max(1).default(0).optional(),
  /**
  * The timestamp granularities to populate for this transcription.
  * @default ['segment']
  */
- timestampGranularities: import_v417.z.array(import_v417.z.enum(["word", "segment"])).default(["segment"]).optional()
+ timestampGranularities: import_v418.z.array(import_v418.z.enum(["word", "segment"])).default(["segment"]).optional()
  });
 
  // src/transcription/openai-transcription-model.ts
@@ -3765,15 +3955,15 @@ var OpenAITranscriptionModel = class {
  providerOptions
  }) {
  const warnings = [];
- const openAIOptions = await (0, import_provider_utils15.parseProviderOptions)({
+ const openAIOptions = await (0, import_provider_utils16.parseProviderOptions)({
  provider: "openai",
  providerOptions,
  schema: openAITranscriptionProviderOptions
  });
  const formData = new FormData();
- const blob = audio instanceof Uint8Array ? new Blob([audio]) : new Blob([(0, import_provider_utils15.convertBase64ToUint8Array)(audio)]);
+ const blob = audio instanceof Uint8Array ? new Blob([audio]) : new Blob([(0, import_provider_utils16.convertBase64ToUint8Array)(audio)]);
  formData.append("model", this.modelId);
- const fileExtension = (0, import_provider_utils15.mediaTypeToExtension)(mediaType);
+ const fileExtension = (0, import_provider_utils16.mediaTypeToExtension)(mediaType);
  formData.append(
  "file",
  new File([blob], "audio", { type: mediaType }),
@@ -3818,15 +4008,15 @@ var OpenAITranscriptionModel = class {
  value: response,
  responseHeaders,
  rawValue: rawResponse
- } = await (0, import_provider_utils15.postFormDataToApi)({
+ } = await (0, import_provider_utils16.postFormDataToApi)({
  url: this.config.url({
  path: "/audio/transcriptions",
  modelId: this.modelId
  }),
- headers: (0, import_provider_utils15.combineHeaders)(this.config.headers(), options.headers),
+ headers: (0, import_provider_utils16.combineHeaders)(this.config.headers(), options.headers),
  formData,
  failedResponseHandler: openaiFailedResponseHandler,
- successfulResponseHandler: (0, import_provider_utils15.createJsonResponseHandler)(
+ successfulResponseHandler: (0, import_provider_utils16.createJsonResponseHandler)(
  openaiTranscriptionResponseSchema
  ),
  abortSignal: options.abortSignal,
@@ -3856,44 +4046,44 @@ var OpenAITranscriptionModel = class {
  };
  }
  };
- var openaiTranscriptionResponseSchema = import_v418.z.object({
- text: import_v418.z.string(),
- language: import_v418.z.string().nullish(),
- duration: import_v418.z.number().nullish(),
- words: import_v418.z.array(
- import_v418.z.object({
- word: import_v418.z.string(),
- start: import_v418.z.number(),
- end: import_v418.z.number()
+ var openaiTranscriptionResponseSchema = import_v419.z.object({
+ text: import_v419.z.string(),
+ language: import_v419.z.string().nullish(),
+ duration: import_v419.z.number().nullish(),
+ words: import_v419.z.array(
+ import_v419.z.object({
+ word: import_v419.z.string(),
+ start: import_v419.z.number(),
+ end: import_v419.z.number()
  })
  ).nullish(),
- segments: import_v418.z.array(
- import_v418.z.object({
- id: import_v418.z.number(),
- seek: import_v418.z.number(),
- start: import_v418.z.number(),
- end: import_v418.z.number(),
- text: import_v418.z.string(),
- tokens: import_v418.z.array(import_v418.z.number()),
- temperature: import_v418.z.number(),
- avg_logprob: import_v418.z.number(),
- compression_ratio: import_v418.z.number(),
- no_speech_prob: import_v418.z.number()
+ segments: import_v419.z.array(
+ import_v419.z.object({
+ id: import_v419.z.number(),
+ seek: import_v419.z.number(),
+ start: import_v419.z.number(),
+ end: import_v419.z.number(),
+ text: import_v419.z.string(),
+ tokens: import_v419.z.array(import_v419.z.number()),
+ temperature: import_v419.z.number(),
+ avg_logprob: import_v419.z.number(),
+ compression_ratio: import_v419.z.number(),
+ no_speech_prob: import_v419.z.number()
  })
  ).nullish()
  });
 
  // src/version.ts
- var VERSION = true ? "2.1.0-beta.6" : "0.0.0-test";
+ var VERSION = true ? "2.1.0-beta.8" : "0.0.0-test";
 
  // src/openai-provider.ts
  function createOpenAI(options = {}) {
  var _a, _b;
- const baseURL = (_a = (0, import_provider_utils16.withoutTrailingSlash)(options.baseURL)) != null ? _a : "https://api.openai.com/v1";
+ const baseURL = (_a = (0, import_provider_utils17.withoutTrailingSlash)(options.baseURL)) != null ? _a : "https://api.openai.com/v1";
  const providerName = (_b = options.name) != null ? _b : "openai";
- const getHeaders = () => (0, import_provider_utils16.withUserAgentSuffix)(
+ const getHeaders = () => (0, import_provider_utils17.withUserAgentSuffix)(
  {
- Authorization: `Bearer ${(0, import_provider_utils16.loadApiKey)({
+ Authorization: `Bearer ${(0, import_provider_utils17.loadApiKey)({
  apiKey: options.apiKey,
  environmentVariableName: "OPENAI_API_KEY",
  description: "OpenAI"