@ai-sdk/openai 2.1.0-beta.0 → 2.1.0-beta.10

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.mjs CHANGED
@@ -1,7 +1,9 @@
  // src/openai-provider.ts
  import {
  loadApiKey,
- withoutTrailingSlash
+ loadOptionalSetting,
+ withoutTrailingSlash,
+ withUserAgentSuffix
  } from "@ai-sdk/provider-utils";

  // src/chat/openai-chat-language-model.ts
@@ -250,7 +252,7 @@ function mapOpenAIFinishReason(finishReason) {

  // src/chat/openai-chat-options.ts
  import { z as z2 } from "zod/v4";
- var openaiProviderOptions = z2.object({
+ var openaiChatLanguageModelOptions = z2.object({
  /**
  * Modify the likelihood of specified tokens appearing in the completion.
  *
@@ -404,7 +406,7 @@ function prepareChatTools({
  // src/chat/openai-chat-language-model.ts
  var OpenAIChatLanguageModel = class {
  constructor(modelId, config) {
- this.specificationVersion = "v2";
+ this.specificationVersion = "v3";
  this.supportedUrls = {
  "image/*": [/^https?:\/\/.*$/]
  };
@@ -434,7 +436,7 @@ var OpenAIChatLanguageModel = class {
  const openaiOptions = (_a = await parseProviderOptions({
  provider: "openai",
  providerOptions,
- schema: openaiProviderOptions
+ schema: openaiChatLanguageModelOptions
  })) != null ? _a : {};
  const structuredOutputs = (_b = openaiOptions.structuredOutputs) != null ? _b : true;
  if (topK != null) {
@@ -1222,7 +1224,7 @@ var openaiCompletionProviderOptions = z4.object({
  // src/completion/openai-completion-language-model.ts
  var OpenAICompletionLanguageModel = class {
  constructor(modelId, config) {
- this.specificationVersion = "v2";
+ this.specificationVersion = "v3";
  this.supportedUrls = {
  // No URLs are supported for completion models.
  };
@@ -1522,7 +1524,7 @@ var openaiEmbeddingProviderOptions = z6.object({
  // src/embedding/openai-embedding-model.ts
  var OpenAIEmbeddingModel = class {
  constructor(modelId, config) {
- this.specificationVersion = "v2";
+ this.specificationVersion = "v3";
  this.maxEmbeddingsPerCall = 2048;
  this.supportsParallelCalls = true;
  this.modelId = modelId;
@@ -1608,7 +1610,7 @@ var OpenAIImageModel = class {
  constructor(modelId, config) {
  this.modelId = modelId;
  this.config = config;
- this.specificationVersion = "v2";
+ this.specificationVersion = "v3";
  }
  get maxImagesPerCall() {
  var _a;
@@ -1792,39 +1794,62 @@ var imageGeneration = (args = {}) => {
  return imageGenerationToolFactory(args);
  };

+ // src/tool/local-shell.ts
+ import { createProviderDefinedToolFactoryWithOutputSchema as createProviderDefinedToolFactoryWithOutputSchema4 } from "@ai-sdk/provider-utils";
+ import { z as z12 } from "zod/v4";
+ var localShellInputSchema = z12.object({
+ action: z12.object({
+ type: z12.literal("exec"),
+ command: z12.array(z12.string()),
+ timeoutMs: z12.number().optional(),
+ user: z12.string().optional(),
+ workingDirectory: z12.string().optional(),
+ env: z12.record(z12.string(), z12.string()).optional()
+ })
+ });
+ var localShellOutputSchema = z12.object({
+ output: z12.string()
+ });
+ var localShell = createProviderDefinedToolFactoryWithOutputSchema4({
+ id: "openai.local_shell",
+ name: "local_shell",
+ inputSchema: localShellInputSchema,
+ outputSchema: localShellOutputSchema
+ });
+
  // src/tool/web-search.ts
  import { createProviderDefinedToolFactory } from "@ai-sdk/provider-utils";
- import { z as z12 } from "zod/v4";
- var webSearchArgsSchema = z12.object({
- filters: z12.object({
- allowedDomains: z12.array(z12.string()).optional()
+ import { z as z13 } from "zod/v4";
+ var webSearchArgsSchema = z13.object({
+ filters: z13.object({
+ allowedDomains: z13.array(z13.string()).optional()
  }).optional(),
- searchContextSize: z12.enum(["low", "medium", "high"]).optional(),
- userLocation: z12.object({
- type: z12.literal("approximate"),
- country: z12.string().optional(),
- city: z12.string().optional(),
- region: z12.string().optional(),
- timezone: z12.string().optional()
+ searchContextSize: z13.enum(["low", "medium", "high"]).optional(),
+ userLocation: z13.object({
+ type: z13.literal("approximate"),
+ country: z13.string().optional(),
+ city: z13.string().optional(),
+ region: z13.string().optional(),
+ timezone: z13.string().optional()
  }).optional()
  });
  var webSearchToolFactory = createProviderDefinedToolFactory({
  id: "openai.web_search",
  name: "web_search",
- inputSchema: z12.object({
- action: z12.discriminatedUnion("type", [
- z12.object({
- type: z12.literal("search"),
- query: z12.string().nullish()
+ inputSchema: z13.object({
+ action: z13.discriminatedUnion("type", [
+ z13.object({
+ type: z13.literal("search"),
+ query: z13.string().nullish()
  }),
- z12.object({
- type: z12.literal("open_page"),
- url: z12.string()
+ z13.object({
+ type: z13.literal("open_page"),
+ url: z13.string()
  }),
- z12.object({
- type: z12.literal("find"),
- url: z12.string(),
- pattern: z12.string()
+ z13.object({
+ type: z13.literal("find"),
+ url: z13.string(),
+ pattern: z13.string()
  })
  ]).nullish()
  })
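
The new `local_shell` tool added above is executed by your application, not by OpenAI: the model sends `{ action: { type: "exec", command, timeoutMs?, user?, workingDirectory?, env? } }` (per `localShellInputSchema`) and expects `{ output: string }` back (per `localShellOutputSchema`). A rough TypeScript sketch of wiring this up, assuming the factory accepts an `execute` handler like other AI SDK provider-defined tools; the handler body, prompt, and model id are illustrative, not part of this package:

import { generateText } from 'ai';
import { openai } from '@ai-sdk/openai';
import { execFile } from 'node:child_process';
import { promisify } from 'node:util';

const run = promisify(execFile);

const result = await generateText({
  model: openai.responses('gpt-5-codex'),
  prompt: 'List the files in the current working directory.',
  tools: {
    // the tool key must be `local_shell` (see the openaiTools comment further down)
    local_shell: openai.tools.localShell({
      // assumed handler shape: receives the parsed input schema, returns the output schema
      execute: async ({ action }) => {
        const [cmd, ...args] = action.command;
        const { stdout, stderr } = await run(cmd, args, {
          cwd: action.workingDirectory,
          env: { ...process.env, ...action.env },
          timeout: action.timeoutMs,
        });
        return { output: stdout + stderr };
      },
    }),
  },
});
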
@@ -1835,58 +1860,58 @@ var webSearch = (args = {}) => {
1835
1860
 
1836
1861
  // src/tool/web-search-preview.ts
1837
1862
  import { createProviderDefinedToolFactory as createProviderDefinedToolFactory2 } from "@ai-sdk/provider-utils";
1838
- import { z as z13 } from "zod/v4";
1839
- var webSearchPreviewArgsSchema = z13.object({
1863
+ import { z as z14 } from "zod/v4";
1864
+ var webSearchPreviewArgsSchema = z14.object({
1840
1865
  /**
1841
1866
  * Search context size to use for the web search.
1842
1867
  * - high: Most comprehensive context, highest cost, slower response
1843
1868
  * - medium: Balanced context, cost, and latency (default)
1844
1869
  * - low: Least context, lowest cost, fastest response
1845
1870
  */
1846
- searchContextSize: z13.enum(["low", "medium", "high"]).optional(),
1871
+ searchContextSize: z14.enum(["low", "medium", "high"]).optional(),
1847
1872
  /**
1848
1873
  * User location information to provide geographically relevant search results.
1849
1874
  */
1850
- userLocation: z13.object({
1875
+ userLocation: z14.object({
1851
1876
  /**
1852
1877
  * Type of location (always 'approximate')
1853
1878
  */
1854
- type: z13.literal("approximate"),
1879
+ type: z14.literal("approximate"),
1855
1880
  /**
1856
1881
  * Two-letter ISO country code (e.g., 'US', 'GB')
1857
1882
  */
1858
- country: z13.string().optional(),
1883
+ country: z14.string().optional(),
1859
1884
  /**
1860
1885
  * City name (free text, e.g., 'Minneapolis')
1861
1886
  */
1862
- city: z13.string().optional(),
1887
+ city: z14.string().optional(),
1863
1888
  /**
1864
1889
  * Region name (free text, e.g., 'Minnesota')
1865
1890
  */
1866
- region: z13.string().optional(),
1891
+ region: z14.string().optional(),
1867
1892
  /**
1868
1893
  * IANA timezone (e.g., 'America/Chicago')
1869
1894
  */
1870
- timezone: z13.string().optional()
1895
+ timezone: z14.string().optional()
1871
1896
  }).optional()
1872
1897
  });
1873
1898
  var webSearchPreview = createProviderDefinedToolFactory2({
1874
1899
  id: "openai.web_search_preview",
1875
1900
  name: "web_search_preview",
1876
- inputSchema: z13.object({
1877
- action: z13.discriminatedUnion("type", [
1878
- z13.object({
1879
- type: z13.literal("search"),
1880
- query: z13.string().nullish()
1901
+ inputSchema: z14.object({
1902
+ action: z14.discriminatedUnion("type", [
1903
+ z14.object({
1904
+ type: z14.literal("search"),
1905
+ query: z14.string().nullish()
1881
1906
  }),
1882
- z13.object({
1883
- type: z13.literal("open_page"),
1884
- url: z13.string()
1907
+ z14.object({
1908
+ type: z14.literal("open_page"),
1909
+ url: z14.string()
1885
1910
  }),
1886
- z13.object({
1887
- type: z13.literal("find"),
1888
- url: z13.string(),
1889
- pattern: z13.string()
1911
+ z14.object({
1912
+ type: z14.literal("find"),
1913
+ url: z14.string(),
1914
+ pattern: z14.string()
1890
1915
  })
1891
1916
  ]).nullish()
1892
1917
  })
@@ -1931,6 +1956,15 @@ var openaiTools = {
  * @param background - Transparent or opaque
  */
  imageGeneration,
+ /**
+ * Local shell is a tool that allows agents to run shell commands locally
+ * on a machine you or the user provides.
+ *
+ * Supported models: `gpt-5-codex` and `codex-mini-latest`
+ *
+ * Must have name `local_shell`.
+ */
+ localShell,
  /**
  * Web search allows models to access up-to-date information from the internet
  * and provide answers with sourced citations.
@@ -1968,14 +2002,14 @@ import {
  parseProviderOptions as parseProviderOptions5,
  postJsonToApi as postJsonToApi5
  } from "@ai-sdk/provider-utils";
- import { z as z15 } from "zod/v4";
+ import { z as z16 } from "zod/v4";

  // src/responses/convert-to-openai-responses-input.ts
  import {
  UnsupportedFunctionalityError as UnsupportedFunctionalityError4
  } from "@ai-sdk/provider";
  import { convertToBase64 as convertToBase642, parseProviderOptions as parseProviderOptions4 } from "@ai-sdk/provider-utils";
- import { z as z14 } from "zod/v4";
+ import { z as z15 } from "zod/v4";
  function isFileId(data, prefixes) {
  if (!prefixes) return false;
  return prefixes.some((prefix) => data.startsWith(prefix));
@@ -1984,9 +2018,10 @@ async function convertToOpenAIResponsesInput({
  prompt,
  systemMessageMode,
  fileIdPrefixes,
- store
+ store,
+ hasLocalShellTool = false
  }) {
- var _a, _b, _c, _d, _e, _f;
+ var _a, _b, _c, _d, _e, _f, _g, _h, _i;
  const input = [];
  const warnings = [];
  for (const { role, content } of prompt) {
@@ -2079,15 +2114,33 @@ async function convertToOpenAIResponsesInput({
  if (part.providerExecuted) {
  break;
  }
+ if (hasLocalShellTool && part.toolName === "local_shell") {
+ const parsedInput = localShellInputSchema.parse(part.input);
+ input.push({
+ type: "local_shell_call",
+ call_id: part.toolCallId,
+ id: (_f = (_e = (_d = part.providerOptions) == null ? void 0 : _d.openai) == null ? void 0 : _e.itemId) != null ? _f : void 0,
+ action: {
+ type: "exec",
+ command: parsedInput.action.command,
+ timeout_ms: parsedInput.action.timeoutMs,
+ user: parsedInput.action.user,
+ working_directory: parsedInput.action.workingDirectory,
+ env: parsedInput.action.env
+ }
+ });
+ break;
+ }
  input.push({
  type: "function_call",
  call_id: part.toolCallId,
  name: part.toolName,
  arguments: JSON.stringify(part.input),
- id: (_f = (_e = (_d = part.providerOptions) == null ? void 0 : _d.openai) == null ? void 0 : _e.itemId) != null ? _f : void 0
+ id: (_i = (_h = (_g = part.providerOptions) == null ? void 0 : _g.openai) == null ? void 0 : _h.itemId) != null ? _i : void 0
  });
  break;
  }
+ // assistant tool result parts are from provider-executed tools:
  case "tool-result": {
  if (store) {
  input.push({ type: "item_reference", id: part.toolCallId });
@@ -2107,26 +2160,40 @@ async function convertToOpenAIResponsesInput({
  });
  const reasoningId = providerOptions == null ? void 0 : providerOptions.itemId;
  if (reasoningId != null) {
- const existingReasoningMessage = reasoningMessages[reasoningId];
- const summaryParts = [];
- if (part.text.length > 0) {
- summaryParts.push({ type: "summary_text", text: part.text });
- } else if (existingReasoningMessage !== void 0) {
- warnings.push({
- type: "other",
- message: `Cannot append empty reasoning part to existing reasoning sequence. Skipping reasoning part: ${JSON.stringify(part)}.`
- });
- }
- if (existingReasoningMessage === void 0) {
- reasoningMessages[reasoningId] = {
- type: "reasoning",
- id: reasoningId,
- encrypted_content: providerOptions == null ? void 0 : providerOptions.reasoningEncryptedContent,
- summary: summaryParts
- };
- input.push(reasoningMessages[reasoningId]);
+ const reasoningMessage = reasoningMessages[reasoningId];
+ if (store) {
+ if (reasoningMessage === void 0) {
+ input.push({ type: "item_reference", id: reasoningId });
+ reasoningMessages[reasoningId] = {
+ type: "reasoning",
+ id: reasoningId,
+ summary: []
+ };
+ }
  } else {
- existingReasoningMessage.summary.push(...summaryParts);
+ const summaryParts = [];
+ if (part.text.length > 0) {
+ summaryParts.push({
+ type: "summary_text",
+ text: part.text
+ });
+ } else if (reasoningMessage !== void 0) {
+ warnings.push({
+ type: "other",
+ message: `Cannot append empty reasoning part to existing reasoning sequence. Skipping reasoning part: ${JSON.stringify(part)}.`
+ });
+ }
+ if (reasoningMessage === void 0) {
+ reasoningMessages[reasoningId] = {
+ type: "reasoning",
+ id: reasoningId,
+ encrypted_content: providerOptions == null ? void 0 : providerOptions.reasoningEncryptedContent,
+ summary: summaryParts
+ };
+ input.push(reasoningMessages[reasoningId]);
+ } else {
+ reasoningMessage.summary.push(...summaryParts);
+ }
  }
  } else {
  warnings.push({
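
The restructured reasoning handling above changes how reasoning parts are replayed to the Responses API: when `store` is enabled (the default), a reasoning item that carries an OpenAI `itemId` is now sent back as a lightweight `{ type: "item_reference", id }` entry instead of re-sending its summary, while the non-stored path keeps the previous behavior of replaying summaries together with `reasoningEncryptedContent`. A minimal sketch of opting out of server-side storage, which keeps the full replay; the prompt and model id are illustrative:

import { generateText } from 'ai';
import { openai } from '@ai-sdk/openai';

const result = await generateText({
  model: openai.responses('o4-mini'),
  prompt: 'Plan a three-step refactor of this module.',
  providerOptions: {
    openai: {
      store: false,
      // round-trip reasoning across steps without server-side storage
      include: ['reasoning.encrypted_content'],
    },
  },
});
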
@@ -2143,6 +2210,14 @@ async function convertToOpenAIResponsesInput({
  case "tool": {
  for (const part of content) {
  const output = part.output;
+ if (hasLocalShellTool && part.toolName === "local_shell" && output.type === "json") {
+ input.push({
+ type: "local_shell_call_output",
+ call_id: part.toolCallId,
+ output: localShellOutputSchema.parse(output.value).output
+ });
+ break;
+ }
  let contentValue;
  switch (output.type) {
  case "text":
@@ -2171,9 +2246,9 @@ async function convertToOpenAIResponsesInput({
  }
  return { input, warnings };
  }
- var openaiResponsesReasoningProviderOptionsSchema = z14.object({
- itemId: z14.string().nullish(),
- reasoningEncryptedContent: z14.string().nullish()
+ var openaiResponsesReasoningProviderOptionsSchema = z15.object({
+ itemId: z15.string().nullish(),
+ reasoningEncryptedContent: z15.string().nullish()
  });

  // src/responses/map-openai-responses-finish-reason.ts
@@ -2236,6 +2311,12 @@ function prepareResponsesTools({
  });
  break;
  }
+ case "openai.local_shell": {
+ openaiTools2.push({
+ type: "local_shell"
+ });
+ break;
+ }
  case "openai.web_search_preview": {
  const args = webSearchPreviewArgsSchema.parse(tool.args);
  openaiTools2.push({
@@ -2315,73 +2396,86 @@ function prepareResponsesTools({
2315
2396
  }
2316
2397
 
2317
2398
  // src/responses/openai-responses-language-model.ts
2318
- var webSearchCallItem = z15.object({
2319
- type: z15.literal("web_search_call"),
2320
- id: z15.string(),
2321
- status: z15.string(),
2322
- action: z15.discriminatedUnion("type", [
2323
- z15.object({
2324
- type: z15.literal("search"),
2325
- query: z15.string().nullish()
2399
+ var webSearchCallItem = z16.object({
2400
+ type: z16.literal("web_search_call"),
2401
+ id: z16.string(),
2402
+ status: z16.string(),
2403
+ action: z16.discriminatedUnion("type", [
2404
+ z16.object({
2405
+ type: z16.literal("search"),
2406
+ query: z16.string().nullish()
2326
2407
  }),
2327
- z15.object({
2328
- type: z15.literal("open_page"),
2329
- url: z15.string()
2408
+ z16.object({
2409
+ type: z16.literal("open_page"),
2410
+ url: z16.string()
2330
2411
  }),
2331
- z15.object({
2332
- type: z15.literal("find"),
2333
- url: z15.string(),
2334
- pattern: z15.string()
2412
+ z16.object({
2413
+ type: z16.literal("find"),
2414
+ url: z16.string(),
2415
+ pattern: z16.string()
2335
2416
  })
2336
2417
  ]).nullish()
2337
2418
  });
2338
- var fileSearchCallItem = z15.object({
2339
- type: z15.literal("file_search_call"),
2340
- id: z15.string(),
2341
- queries: z15.array(z15.string()),
2342
- results: z15.array(
2343
- z15.object({
2344
- attributes: z15.record(z15.string(), z15.unknown()),
2345
- file_id: z15.string(),
2346
- filename: z15.string(),
2347
- score: z15.number(),
2348
- text: z15.string()
2419
+ var fileSearchCallItem = z16.object({
2420
+ type: z16.literal("file_search_call"),
2421
+ id: z16.string(),
2422
+ queries: z16.array(z16.string()),
2423
+ results: z16.array(
2424
+ z16.object({
2425
+ attributes: z16.record(z16.string(), z16.unknown()),
2426
+ file_id: z16.string(),
2427
+ filename: z16.string(),
2428
+ score: z16.number(),
2429
+ text: z16.string()
2349
2430
  })
2350
2431
  ).nullish()
2351
2432
  });
2352
- var codeInterpreterCallItem = z15.object({
2353
- type: z15.literal("code_interpreter_call"),
2354
- id: z15.string(),
2355
- code: z15.string().nullable(),
2356
- container_id: z15.string(),
2357
- outputs: z15.array(
2358
- z15.discriminatedUnion("type", [
2359
- z15.object({ type: z15.literal("logs"), logs: z15.string() }),
2360
- z15.object({ type: z15.literal("image"), url: z15.string() })
2433
+ var codeInterpreterCallItem = z16.object({
2434
+ type: z16.literal("code_interpreter_call"),
2435
+ id: z16.string(),
2436
+ code: z16.string().nullable(),
2437
+ container_id: z16.string(),
2438
+ outputs: z16.array(
2439
+ z16.discriminatedUnion("type", [
2440
+ z16.object({ type: z16.literal("logs"), logs: z16.string() }),
2441
+ z16.object({ type: z16.literal("image"), url: z16.string() })
2361
2442
  ])
2362
2443
  ).nullable()
2363
2444
  });
2364
- var imageGenerationCallItem = z15.object({
2365
- type: z15.literal("image_generation_call"),
2366
- id: z15.string(),
2367
- result: z15.string()
2445
+ var localShellCallItem = z16.object({
2446
+ type: z16.literal("local_shell_call"),
2447
+ id: z16.string(),
2448
+ call_id: z16.string(),
2449
+ action: z16.object({
2450
+ type: z16.literal("exec"),
2451
+ command: z16.array(z16.string()),
2452
+ timeout_ms: z16.number().optional(),
2453
+ user: z16.string().optional(),
2454
+ working_directory: z16.string().optional(),
2455
+ env: z16.record(z16.string(), z16.string()).optional()
2456
+ })
2457
+ });
2458
+ var imageGenerationCallItem = z16.object({
2459
+ type: z16.literal("image_generation_call"),
2460
+ id: z16.string(),
2461
+ result: z16.string()
2368
2462
  });
2369
2463
  var TOP_LOGPROBS_MAX = 20;
2370
- var LOGPROBS_SCHEMA = z15.array(
2371
- z15.object({
2372
- token: z15.string(),
2373
- logprob: z15.number(),
2374
- top_logprobs: z15.array(
2375
- z15.object({
2376
- token: z15.string(),
2377
- logprob: z15.number()
2464
+ var LOGPROBS_SCHEMA = z16.array(
2465
+ z16.object({
2466
+ token: z16.string(),
2467
+ logprob: z16.number(),
2468
+ top_logprobs: z16.array(
2469
+ z16.object({
2470
+ token: z16.string(),
2471
+ logprob: z16.number()
2378
2472
  })
2379
2473
  )
2380
2474
  })
2381
2475
  );
2382
2476
  var OpenAIResponsesLanguageModel = class {
2383
2477
  constructor(modelId, config) {
2384
- this.specificationVersion = "v2";
2478
+ this.specificationVersion = "v3";
2385
2479
  this.supportedUrls = {
2386
2480
  "image/*": [/^https?:\/\/.*$/],
2387
2481
  "application/pdf": [/^https?:\/\/.*$/]
@@ -2440,7 +2534,8 @@ var OpenAIResponsesLanguageModel = class {
  prompt,
  systemMessageMode: modelConfig.systemMessageMode,
  fileIdPrefixes: this.config.fileIdPrefixes,
- store: (_a = openaiOptions == null ? void 0 : openaiOptions.store) != null ? _a : true
+ store: (_a = openaiOptions == null ? void 0 : openaiOptions.store) != null ? _a : true,
+ hasLocalShellTool: hasOpenAITool("openai.local_shell")
  });
  warnings.push(...inputWarnings);
  const strictJsonSchema = (_b = openaiOptions == null ? void 0 : openaiOptions.strictJsonSchema) != null ? _b : false;
@@ -2605,45 +2700,45 @@ var OpenAIResponsesLanguageModel = class {
2605
2700
  body,
2606
2701
  failedResponseHandler: openaiFailedResponseHandler,
2607
2702
  successfulResponseHandler: createJsonResponseHandler5(
2608
- z15.object({
2609
- id: z15.string(),
2610
- created_at: z15.number(),
2611
- error: z15.object({
2612
- code: z15.string(),
2613
- message: z15.string()
2703
+ z16.object({
2704
+ id: z16.string(),
2705
+ created_at: z16.number(),
2706
+ error: z16.object({
2707
+ code: z16.string(),
2708
+ message: z16.string()
2614
2709
  }).nullish(),
2615
- model: z15.string(),
2616
- output: z15.array(
2617
- z15.discriminatedUnion("type", [
2618
- z15.object({
2619
- type: z15.literal("message"),
2620
- role: z15.literal("assistant"),
2621
- id: z15.string(),
2622
- content: z15.array(
2623
- z15.object({
2624
- type: z15.literal("output_text"),
2625
- text: z15.string(),
2710
+ model: z16.string(),
2711
+ output: z16.array(
2712
+ z16.discriminatedUnion("type", [
2713
+ z16.object({
2714
+ type: z16.literal("message"),
2715
+ role: z16.literal("assistant"),
2716
+ id: z16.string(),
2717
+ content: z16.array(
2718
+ z16.object({
2719
+ type: z16.literal("output_text"),
2720
+ text: z16.string(),
2626
2721
  logprobs: LOGPROBS_SCHEMA.nullish(),
2627
- annotations: z15.array(
2628
- z15.discriminatedUnion("type", [
2629
- z15.object({
2630
- type: z15.literal("url_citation"),
2631
- start_index: z15.number(),
2632
- end_index: z15.number(),
2633
- url: z15.string(),
2634
- title: z15.string()
2722
+ annotations: z16.array(
2723
+ z16.discriminatedUnion("type", [
2724
+ z16.object({
2725
+ type: z16.literal("url_citation"),
2726
+ start_index: z16.number(),
2727
+ end_index: z16.number(),
2728
+ url: z16.string(),
2729
+ title: z16.string()
2635
2730
  }),
2636
- z15.object({
2637
- type: z15.literal("file_citation"),
2638
- file_id: z15.string(),
2639
- filename: z15.string().nullish(),
2640
- index: z15.number().nullish(),
2641
- start_index: z15.number().nullish(),
2642
- end_index: z15.number().nullish(),
2643
- quote: z15.string().nullish()
2731
+ z16.object({
2732
+ type: z16.literal("file_citation"),
2733
+ file_id: z16.string(),
2734
+ filename: z16.string().nullish(),
2735
+ index: z16.number().nullish(),
2736
+ start_index: z16.number().nullish(),
2737
+ end_index: z16.number().nullish(),
2738
+ quote: z16.string().nullish()
2644
2739
  }),
2645
- z15.object({
2646
- type: z15.literal("container_file_citation")
2740
+ z16.object({
2741
+ type: z16.literal("container_file_citation")
2647
2742
  })
2648
2743
  ])
2649
2744
  )
@@ -2654,33 +2749,34 @@ var OpenAIResponsesLanguageModel = class {
2654
2749
  fileSearchCallItem,
2655
2750
  codeInterpreterCallItem,
2656
2751
  imageGenerationCallItem,
2657
- z15.object({
2658
- type: z15.literal("function_call"),
2659
- call_id: z15.string(),
2660
- name: z15.string(),
2661
- arguments: z15.string(),
2662
- id: z15.string()
2752
+ localShellCallItem,
2753
+ z16.object({
2754
+ type: z16.literal("function_call"),
2755
+ call_id: z16.string(),
2756
+ name: z16.string(),
2757
+ arguments: z16.string(),
2758
+ id: z16.string()
2663
2759
  }),
2664
- z15.object({
2665
- type: z15.literal("computer_call"),
2666
- id: z15.string(),
2667
- status: z15.string().optional()
2760
+ z16.object({
2761
+ type: z16.literal("computer_call"),
2762
+ id: z16.string(),
2763
+ status: z16.string().optional()
2668
2764
  }),
2669
- z15.object({
2670
- type: z15.literal("reasoning"),
2671
- id: z15.string(),
2672
- encrypted_content: z15.string().nullish(),
2673
- summary: z15.array(
2674
- z15.object({
2675
- type: z15.literal("summary_text"),
2676
- text: z15.string()
2765
+ z16.object({
2766
+ type: z16.literal("reasoning"),
2767
+ id: z16.string(),
2768
+ encrypted_content: z16.string().nullish(),
2769
+ summary: z16.array(
2770
+ z16.object({
2771
+ type: z16.literal("summary_text"),
2772
+ text: z16.string()
2677
2773
  })
2678
2774
  )
2679
2775
  })
2680
2776
  ])
2681
2777
  ),
2682
- service_tier: z15.string().nullish(),
2683
- incomplete_details: z15.object({ reason: z15.string() }).nullable(),
2778
+ service_tier: z16.string().nullish(),
2779
+ incomplete_details: z16.object({ reason: z16.string() }).nullish(),
2684
2780
  usage: usageSchema2
2685
2781
  })
2686
2782
  ),
@@ -2740,6 +2836,20 @@ var OpenAIResponsesLanguageModel = class {
  });
  break;
  }
+ case "local_shell_call": {
+ content.push({
+ type: "tool-call",
+ toolCallId: part.call_id,
+ toolName: "local_shell",
+ input: JSON.stringify({ action: part.action }),
+ providerMetadata: {
+ openai: {
+ itemId: part.id
+ }
+ }
+ });
+ break;
+ }
  case "message": {
  for (const contentPart of part.content) {
  if (((_c = (_b = options.providerOptions) == null ? void 0 : _b.openai) == null ? void 0 : _c.logprobs) && contentPart.logprobs) {
@@ -2997,6 +3107,24 @@ var OpenAIResponsesLanguageModel = class {
  id: value.item.id,
  toolName: "computer_use"
  });
+ } else if (value.item.type === "code_interpreter_call") {
+ ongoingToolCalls[value.output_index] = {
+ toolName: "code_interpreter",
+ toolCallId: value.item.id,
+ codeInterpreter: {
+ containerId: value.item.container_id
+ }
+ };
+ controller.enqueue({
+ type: "tool-input-start",
+ id: value.item.id,
+ toolName: "code_interpreter"
+ });
+ controller.enqueue({
+ type: "tool-input-delta",
+ id: value.item.id,
+ delta: `{"containerId":"${value.item.container_id}","code":"`
+ });
  } else if (value.item.type === "file_search_call") {
  controller.enqueue({
  type: "tool-call",
@@ -3120,16 +3248,7 @@ var OpenAIResponsesLanguageModel = class {
  providerExecuted: true
  });
  } else if (value.item.type === "code_interpreter_call") {
- controller.enqueue({
- type: "tool-call",
- toolCallId: value.item.id,
- toolName: "code_interpreter",
- input: JSON.stringify({
- code: value.item.code,
- containerId: value.item.container_id
- }),
- providerExecuted: true
- });
+ ongoingToolCalls[value.output_index] = void 0;
  controller.enqueue({
  type: "tool-result",
  toolCallId: value.item.id,
@@ -3149,6 +3268,26 @@ var OpenAIResponsesLanguageModel = class {
  },
  providerExecuted: true
  });
+ } else if (value.item.type === "local_shell_call") {
+ ongoingToolCalls[value.output_index] = void 0;
+ controller.enqueue({
+ type: "tool-call",
+ toolCallId: value.item.call_id,
+ toolName: "local_shell",
+ input: JSON.stringify({
+ action: {
+ type: "exec",
+ command: value.item.action.command,
+ timeoutMs: value.item.action.timeout_ms,
+ user: value.item.action.user,
+ workingDirectory: value.item.action.working_directory,
+ env: value.item.action.env
+ }
+ }),
+ providerMetadata: {
+ openai: { itemId: value.item.id }
+ }
+ });
  } else if (value.item.type === "message") {
  controller.enqueue({
  type: "text-end",
@@ -3179,6 +3318,40 @@ var OpenAIResponsesLanguageModel = class {
  delta: value.delta
  });
  }
+ } else if (isResponseCodeInterpreterCallCodeDeltaChunk(value)) {
+ const toolCall = ongoingToolCalls[value.output_index];
+ if (toolCall != null) {
+ controller.enqueue({
+ type: "tool-input-delta",
+ id: toolCall.toolCallId,
+ // The delta is code, which is embedding in a JSON string.
+ // To escape it, we use JSON.stringify and slice to remove the outer quotes.
+ delta: JSON.stringify(value.delta).slice(1, -1)
+ });
+ }
+ } else if (isResponseCodeInterpreterCallCodeDoneChunk(value)) {
+ const toolCall = ongoingToolCalls[value.output_index];
+ if (toolCall != null) {
+ controller.enqueue({
+ type: "tool-input-delta",
+ id: toolCall.toolCallId,
+ delta: '"}'
+ });
+ controller.enqueue({
+ type: "tool-input-end",
+ id: toolCall.toolCallId
+ });
+ controller.enqueue({
+ type: "tool-call",
+ toolCallId: toolCall.toolCallId,
+ toolName: "code_interpreter",
+ input: JSON.stringify({
+ code: value.code,
+ containerId: toolCall.codeInterpreter.containerId
+ }),
+ providerExecuted: true
+ });
+ }
  } else if (isResponseCreatedChunk(value)) {
  responseId = value.response.id;
  controller.enqueue({
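
The chunks above change how `code_interpreter_call` tool inputs are streamed: the code is now forwarded incrementally as `tool-input-start` / `tool-input-delta` parts that assemble into the JSON string `{"containerId":"...","code":"..."}`, and the final `tool-call` is only emitted once `response.code_interpreter_call_code.done` arrives. A rough consumer sketch, assuming the AI SDK `fullStream` part shapes and the `codeInterpreter` tool factory exported alongside the tools in this file; exact field names may differ by SDK version, and the prompt and model id are illustrative:

import { streamText } from 'ai';
import { openai } from '@ai-sdk/openai';

const { fullStream } = streamText({
  model: openai.responses('gpt-4.1'),
  prompt: 'Use Python to compute the first 10 primes.',
  tools: { code_interpreter: openai.tools.codeInterpreter({}) },
});

let assembled = '';
for await (const part of fullStream) {
  if (part.type === 'tool-input-delta') {
    assembled += part.delta; // grows into '{"containerId":"...","code":"..."}'
  } else if (part.type === 'tool-call' && part.toolName === 'code_interpreter') {
    console.log(part.input); // the same data, fully assembled
  }
}
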
@@ -3285,166 +3458,194 @@ var OpenAIResponsesLanguageModel = class {
3285
3458
  };
3286
3459
  }
3287
3460
  };
3288
- var usageSchema2 = z15.object({
3289
- input_tokens: z15.number(),
3290
- input_tokens_details: z15.object({ cached_tokens: z15.number().nullish() }).nullish(),
3291
- output_tokens: z15.number(),
3292
- output_tokens_details: z15.object({ reasoning_tokens: z15.number().nullish() }).nullish()
3461
+ var usageSchema2 = z16.object({
3462
+ input_tokens: z16.number(),
3463
+ input_tokens_details: z16.object({ cached_tokens: z16.number().nullish() }).nullish(),
3464
+ output_tokens: z16.number(),
3465
+ output_tokens_details: z16.object({ reasoning_tokens: z16.number().nullish() }).nullish()
3293
3466
  });
3294
- var textDeltaChunkSchema = z15.object({
3295
- type: z15.literal("response.output_text.delta"),
3296
- item_id: z15.string(),
3297
- delta: z15.string(),
3467
+ var textDeltaChunkSchema = z16.object({
3468
+ type: z16.literal("response.output_text.delta"),
3469
+ item_id: z16.string(),
3470
+ delta: z16.string(),
3298
3471
  logprobs: LOGPROBS_SCHEMA.nullish()
3299
3472
  });
3300
- var errorChunkSchema = z15.object({
3301
- type: z15.literal("error"),
3302
- code: z15.string(),
3303
- message: z15.string(),
3304
- param: z15.string().nullish(),
3305
- sequence_number: z15.number()
3473
+ var errorChunkSchema = z16.object({
3474
+ type: z16.literal("error"),
3475
+ code: z16.string(),
3476
+ message: z16.string(),
3477
+ param: z16.string().nullish(),
3478
+ sequence_number: z16.number()
3306
3479
  });
3307
- var responseFinishedChunkSchema = z15.object({
3308
- type: z15.enum(["response.completed", "response.incomplete"]),
3309
- response: z15.object({
3310
- incomplete_details: z15.object({ reason: z15.string() }).nullish(),
3480
+ var responseFinishedChunkSchema = z16.object({
3481
+ type: z16.enum(["response.completed", "response.incomplete"]),
3482
+ response: z16.object({
3483
+ incomplete_details: z16.object({ reason: z16.string() }).nullish(),
3311
3484
  usage: usageSchema2,
3312
- service_tier: z15.string().nullish()
3485
+ service_tier: z16.string().nullish()
3313
3486
  })
3314
3487
  });
3315
- var responseCreatedChunkSchema = z15.object({
3316
- type: z15.literal("response.created"),
3317
- response: z15.object({
3318
- id: z15.string(),
3319
- created_at: z15.number(),
3320
- model: z15.string(),
3321
- service_tier: z15.string().nullish()
3488
+ var responseCreatedChunkSchema = z16.object({
3489
+ type: z16.literal("response.created"),
3490
+ response: z16.object({
3491
+ id: z16.string(),
3492
+ created_at: z16.number(),
3493
+ model: z16.string(),
3494
+ service_tier: z16.string().nullish()
3322
3495
  })
3323
3496
  });
3324
- var responseOutputItemAddedSchema = z15.object({
3325
- type: z15.literal("response.output_item.added"),
3326
- output_index: z15.number(),
3327
- item: z15.discriminatedUnion("type", [
3328
- z15.object({
3329
- type: z15.literal("message"),
3330
- id: z15.string()
3497
+ var responseOutputItemAddedSchema = z16.object({
3498
+ type: z16.literal("response.output_item.added"),
3499
+ output_index: z16.number(),
3500
+ item: z16.discriminatedUnion("type", [
3501
+ z16.object({
3502
+ type: z16.literal("message"),
3503
+ id: z16.string()
3331
3504
  }),
3332
- z15.object({
3333
- type: z15.literal("reasoning"),
3334
- id: z15.string(),
3335
- encrypted_content: z15.string().nullish()
3505
+ z16.object({
3506
+ type: z16.literal("reasoning"),
3507
+ id: z16.string(),
3508
+ encrypted_content: z16.string().nullish()
3336
3509
  }),
3337
- z15.object({
3338
- type: z15.literal("function_call"),
3339
- id: z15.string(),
3340
- call_id: z15.string(),
3341
- name: z15.string(),
3342
- arguments: z15.string()
3510
+ z16.object({
3511
+ type: z16.literal("function_call"),
3512
+ id: z16.string(),
3513
+ call_id: z16.string(),
3514
+ name: z16.string(),
3515
+ arguments: z16.string()
3343
3516
  }),
3344
- z15.object({
3345
- type: z15.literal("web_search_call"),
3346
- id: z15.string(),
3347
- status: z15.string(),
3348
- action: z15.object({
3349
- type: z15.literal("search"),
3350
- query: z15.string().optional()
3517
+ z16.object({
3518
+ type: z16.literal("web_search_call"),
3519
+ id: z16.string(),
3520
+ status: z16.string(),
3521
+ action: z16.object({
3522
+ type: z16.literal("search"),
3523
+ query: z16.string().optional()
3351
3524
  }).nullish()
3352
3525
  }),
3353
- z15.object({
3354
- type: z15.literal("computer_call"),
3355
- id: z15.string(),
3356
- status: z15.string()
3526
+ z16.object({
3527
+ type: z16.literal("computer_call"),
3528
+ id: z16.string(),
3529
+ status: z16.string()
3357
3530
  }),
3358
- z15.object({
3359
- type: z15.literal("file_search_call"),
3360
- id: z15.string()
3531
+ z16.object({
3532
+ type: z16.literal("file_search_call"),
3533
+ id: z16.string()
3361
3534
  }),
3362
- z15.object({
3363
- type: z15.literal("image_generation_call"),
3364
- id: z15.string()
3535
+ z16.object({
3536
+ type: z16.literal("image_generation_call"),
3537
+ id: z16.string()
3538
+ }),
3539
+ z16.object({
3540
+ type: z16.literal("code_interpreter_call"),
3541
+ id: z16.string(),
3542
+ container_id: z16.string(),
3543
+ code: z16.string().nullable(),
3544
+ outputs: z16.array(
3545
+ z16.discriminatedUnion("type", [
3546
+ z16.object({ type: z16.literal("logs"), logs: z16.string() }),
3547
+ z16.object({ type: z16.literal("image"), url: z16.string() })
3548
+ ])
3549
+ ).nullable(),
3550
+ status: z16.string()
3365
3551
  })
3366
3552
  ])
3367
3553
  });
3368
- var responseOutputItemDoneSchema = z15.object({
3369
- type: z15.literal("response.output_item.done"),
3370
- output_index: z15.number(),
3371
- item: z15.discriminatedUnion("type", [
3372
- z15.object({
3373
- type: z15.literal("message"),
3374
- id: z15.string()
3554
+ var responseOutputItemDoneSchema = z16.object({
3555
+ type: z16.literal("response.output_item.done"),
3556
+ output_index: z16.number(),
3557
+ item: z16.discriminatedUnion("type", [
3558
+ z16.object({
3559
+ type: z16.literal("message"),
3560
+ id: z16.string()
3375
3561
  }),
3376
- z15.object({
3377
- type: z15.literal("reasoning"),
3378
- id: z15.string(),
3379
- encrypted_content: z15.string().nullish()
3562
+ z16.object({
3563
+ type: z16.literal("reasoning"),
3564
+ id: z16.string(),
3565
+ encrypted_content: z16.string().nullish()
3380
3566
  }),
3381
- z15.object({
3382
- type: z15.literal("function_call"),
3383
- id: z15.string(),
3384
- call_id: z15.string(),
3385
- name: z15.string(),
3386
- arguments: z15.string(),
3387
- status: z15.literal("completed")
3567
+ z16.object({
3568
+ type: z16.literal("function_call"),
3569
+ id: z16.string(),
3570
+ call_id: z16.string(),
3571
+ name: z16.string(),
3572
+ arguments: z16.string(),
3573
+ status: z16.literal("completed")
3388
3574
  }),
3389
3575
  codeInterpreterCallItem,
3390
3576
  imageGenerationCallItem,
3391
3577
  webSearchCallItem,
3392
3578
  fileSearchCallItem,
3393
- z15.object({
3394
- type: z15.literal("computer_call"),
3395
- id: z15.string(),
3396
- status: z15.literal("completed")
3579
+ localShellCallItem,
3580
+ z16.object({
3581
+ type: z16.literal("computer_call"),
3582
+ id: z16.string(),
3583
+ status: z16.literal("completed")
3397
3584
  })
3398
3585
  ])
3399
3586
  });
3400
- var responseFunctionCallArgumentsDeltaSchema = z15.object({
3401
- type: z15.literal("response.function_call_arguments.delta"),
3402
- item_id: z15.string(),
3403
- output_index: z15.number(),
3404
- delta: z15.string()
3587
+ var responseFunctionCallArgumentsDeltaSchema = z16.object({
3588
+ type: z16.literal("response.function_call_arguments.delta"),
3589
+ item_id: z16.string(),
3590
+ output_index: z16.number(),
3591
+ delta: z16.string()
3592
+ });
3593
+ var responseCodeInterpreterCallCodeDeltaSchema = z16.object({
3594
+ type: z16.literal("response.code_interpreter_call_code.delta"),
3595
+ item_id: z16.string(),
3596
+ output_index: z16.number(),
3597
+ delta: z16.string()
3598
+ });
3599
+ var responseCodeInterpreterCallCodeDoneSchema = z16.object({
3600
+ type: z16.literal("response.code_interpreter_call_code.done"),
3601
+ item_id: z16.string(),
3602
+ output_index: z16.number(),
3603
+ code: z16.string()
3405
3604
  });
3406
- var responseAnnotationAddedSchema = z15.object({
3407
- type: z15.literal("response.output_text.annotation.added"),
3408
- annotation: z15.discriminatedUnion("type", [
3409
- z15.object({
3410
- type: z15.literal("url_citation"),
3411
- url: z15.string(),
3412
- title: z15.string()
3605
+ var responseAnnotationAddedSchema = z16.object({
3606
+ type: z16.literal("response.output_text.annotation.added"),
3607
+ annotation: z16.discriminatedUnion("type", [
3608
+ z16.object({
3609
+ type: z16.literal("url_citation"),
3610
+ url: z16.string(),
3611
+ title: z16.string()
3413
3612
  }),
3414
- z15.object({
3415
- type: z15.literal("file_citation"),
3416
- file_id: z15.string(),
3417
- filename: z15.string().nullish(),
3418
- index: z15.number().nullish(),
3419
- start_index: z15.number().nullish(),
3420
- end_index: z15.number().nullish(),
3421
- quote: z15.string().nullish()
3613
+ z16.object({
3614
+ type: z16.literal("file_citation"),
3615
+ file_id: z16.string(),
3616
+ filename: z16.string().nullish(),
3617
+ index: z16.number().nullish(),
3618
+ start_index: z16.number().nullish(),
3619
+ end_index: z16.number().nullish(),
3620
+ quote: z16.string().nullish()
3422
3621
  })
3423
3622
  ])
3424
3623
  });
3425
- var responseReasoningSummaryPartAddedSchema = z15.object({
3426
- type: z15.literal("response.reasoning_summary_part.added"),
3427
- item_id: z15.string(),
3428
- summary_index: z15.number()
3624
+ var responseReasoningSummaryPartAddedSchema = z16.object({
3625
+ type: z16.literal("response.reasoning_summary_part.added"),
3626
+ item_id: z16.string(),
3627
+ summary_index: z16.number()
3429
3628
  });
3430
- var responseReasoningSummaryTextDeltaSchema = z15.object({
3431
- type: z15.literal("response.reasoning_summary_text.delta"),
3432
- item_id: z15.string(),
3433
- summary_index: z15.number(),
3434
- delta: z15.string()
3629
+ var responseReasoningSummaryTextDeltaSchema = z16.object({
3630
+ type: z16.literal("response.reasoning_summary_text.delta"),
3631
+ item_id: z16.string(),
3632
+ summary_index: z16.number(),
3633
+ delta: z16.string()
3435
3634
  });
3436
- var openaiResponsesChunkSchema = z15.union([
3635
+ var openaiResponsesChunkSchema = z16.union([
3437
3636
  textDeltaChunkSchema,
3438
3637
  responseFinishedChunkSchema,
3439
3638
  responseCreatedChunkSchema,
3440
3639
  responseOutputItemAddedSchema,
3441
3640
  responseOutputItemDoneSchema,
3442
3641
  responseFunctionCallArgumentsDeltaSchema,
3642
+ responseCodeInterpreterCallCodeDeltaSchema,
3643
+ responseCodeInterpreterCallCodeDoneSchema,
3443
3644
  responseAnnotationAddedSchema,
3444
3645
  responseReasoningSummaryPartAddedSchema,
3445
3646
  responseReasoningSummaryTextDeltaSchema,
3446
3647
  errorChunkSchema,
3447
- z15.object({ type: z15.string() }).loose()
3648
+ z16.object({ type: z16.string() }).loose()
3448
3649
  // fallback for unknown chunks
3449
3650
  ]);
3450
3651
  function isTextDeltaChunk(chunk) {
@@ -3465,6 +3666,12 @@ function isResponseCreatedChunk(chunk) {
  function isResponseFunctionCallArgumentsDeltaChunk(chunk) {
  return chunk.type === "response.function_call_arguments.delta";
  }
+ function isResponseCodeInterpreterCallCodeDeltaChunk(chunk) {
+ return chunk.type === "response.code_interpreter_call_code.delta";
+ }
+ function isResponseCodeInterpreterCallCodeDoneChunk(chunk) {
+ return chunk.type === "response.code_interpreter_call_code.done";
+ }
  function isResponseOutputItemAddedChunk(chunk) {
  return chunk.type === "response.output_item.added";
  }
@@ -3517,15 +3724,15 @@ function getResponsesModelConfig(modelId) {
3517
3724
  isReasoningModel: false
3518
3725
  };
3519
3726
  }
3520
- var openaiResponsesProviderOptionsSchema = z15.object({
3521
- include: z15.array(
3522
- z15.enum([
3727
+ var openaiResponsesProviderOptionsSchema = z16.object({
3728
+ include: z16.array(
3729
+ z16.enum([
3523
3730
  "reasoning.encrypted_content",
3524
3731
  "file_search_call.results",
3525
3732
  "message.output_text.logprobs"
3526
3733
  ])
3527
3734
  ).nullish(),
3528
- instructions: z15.string().nullish(),
3735
+ instructions: z16.string().nullish(),
3529
3736
  /**
3530
3737
  * Return the log probabilities of the tokens.
3531
3738
  *
@@ -3538,25 +3745,25 @@ var openaiResponsesProviderOptionsSchema = z15.object({
3538
3745
  * @see https://platform.openai.com/docs/api-reference/responses/create
3539
3746
  * @see https://cookbook.openai.com/examples/using_logprobs
3540
3747
  */
3541
- logprobs: z15.union([z15.boolean(), z15.number().min(1).max(TOP_LOGPROBS_MAX)]).optional(),
3748
+ logprobs: z16.union([z16.boolean(), z16.number().min(1).max(TOP_LOGPROBS_MAX)]).optional(),
3542
3749
  /**
3543
3750
  * The maximum number of total calls to built-in tools that can be processed in a response.
3544
3751
  * This maximum number applies across all built-in tool calls, not per individual tool.
3545
3752
  * Any further attempts to call a tool by the model will be ignored.
3546
3753
  */
3547
- maxToolCalls: z15.number().nullish(),
3548
- metadata: z15.any().nullish(),
3549
- parallelToolCalls: z15.boolean().nullish(),
3550
- previousResponseId: z15.string().nullish(),
3551
- promptCacheKey: z15.string().nullish(),
3552
- reasoningEffort: z15.string().nullish(),
3553
- reasoningSummary: z15.string().nullish(),
3554
- safetyIdentifier: z15.string().nullish(),
3555
- serviceTier: z15.enum(["auto", "flex", "priority"]).nullish(),
3556
- store: z15.boolean().nullish(),
3557
- strictJsonSchema: z15.boolean().nullish(),
3558
- textVerbosity: z15.enum(["low", "medium", "high"]).nullish(),
3559
- user: z15.string().nullish()
3754
+ maxToolCalls: z16.number().nullish(),
3755
+ metadata: z16.any().nullish(),
3756
+ parallelToolCalls: z16.boolean().nullish(),
3757
+ previousResponseId: z16.string().nullish(),
3758
+ promptCacheKey: z16.string().nullish(),
3759
+ reasoningEffort: z16.string().nullish(),
3760
+ reasoningSummary: z16.string().nullish(),
3761
+ safetyIdentifier: z16.string().nullish(),
3762
+ serviceTier: z16.enum(["auto", "flex", "priority"]).nullish(),
3763
+ store: z16.boolean().nullish(),
3764
+ strictJsonSchema: z16.boolean().nullish(),
3765
+ textVerbosity: z16.enum(["low", "medium", "high"]).nullish(),
3766
+ user: z16.string().nullish()
3560
3767
  });
3561
3768
 
3562
3769
  // src/speech/openai-speech-model.ts
@@ -3566,10 +3773,10 @@ import {
3566
3773
  parseProviderOptions as parseProviderOptions6,
3567
3774
  postJsonToApi as postJsonToApi6
3568
3775
  } from "@ai-sdk/provider-utils";
3569
- import { z as z16 } from "zod/v4";
3570
- var OpenAIProviderOptionsSchema = z16.object({
3571
- instructions: z16.string().nullish(),
3572
- speed: z16.number().min(0.25).max(4).default(1).nullish()
3776
+ import { z as z17 } from "zod/v4";
3777
+ var OpenAIProviderOptionsSchema = z17.object({
3778
+ instructions: z17.string().nullish(),
3779
+ speed: z17.number().min(0.25).max(4).default(1).nullish()
3573
3780
  });
3574
3781
  var OpenAISpeechModel = class {
3575
3782
  constructor(modelId, config) {
@@ -3680,33 +3887,33 @@ import {
3680
3887
  parseProviderOptions as parseProviderOptions7,
3681
3888
  postFormDataToApi
3682
3889
  } from "@ai-sdk/provider-utils";
3683
- import { z as z18 } from "zod/v4";
3890
+ import { z as z19 } from "zod/v4";
3684
3891
 
3685
3892
  // src/transcription/openai-transcription-options.ts
3686
- import { z as z17 } from "zod/v4";
3687
- var openAITranscriptionProviderOptions = z17.object({
3893
+ import { z as z18 } from "zod/v4";
3894
+ var openAITranscriptionProviderOptions = z18.object({
3688
3895
  /**
3689
3896
  * Additional information to include in the transcription response.
3690
3897
  */
3691
- include: z17.array(z17.string()).optional(),
3898
+ include: z18.array(z18.string()).optional(),
3692
3899
  /**
3693
3900
  * The language of the input audio in ISO-639-1 format.
3694
3901
  */
3695
- language: z17.string().optional(),
3902
+ language: z18.string().optional(),
3696
3903
  /**
3697
3904
  * An optional text to guide the model's style or continue a previous audio segment.
3698
3905
  */
3699
- prompt: z17.string().optional(),
3906
+ prompt: z18.string().optional(),
3700
3907
  /**
3701
3908
  * The sampling temperature, between 0 and 1.
3702
3909
  * @default 0
3703
3910
  */
3704
- temperature: z17.number().min(0).max(1).default(0).optional(),
3911
+ temperature: z18.number().min(0).max(1).default(0).optional(),
3705
3912
  /**
3706
3913
  * The timestamp granularities to populate for this transcription.
3707
3914
  * @default ['segment']
3708
3915
  */
3709
- timestampGranularities: z17.array(z17.enum(["word", "segment"])).default(["segment"]).optional()
3916
+ timestampGranularities: z18.array(z18.enum(["word", "segment"])).default(["segment"]).optional()
3710
3917
  });
3711
3918
 
3712
3919
  // src/transcription/openai-transcription-model.ts
@@ -3875,48 +4082,59 @@ var OpenAITranscriptionModel = class {
  };
  }
  };
- var openaiTranscriptionResponseSchema = z18.object({
- text: z18.string(),
- language: z18.string().nullish(),
- duration: z18.number().nullish(),
- words: z18.array(
- z18.object({
- word: z18.string(),
- start: z18.number(),
- end: z18.number()
+ var openaiTranscriptionResponseSchema = z19.object({
+ text: z19.string(),
+ language: z19.string().nullish(),
+ duration: z19.number().nullish(),
+ words: z19.array(
+ z19.object({
+ word: z19.string(),
+ start: z19.number(),
+ end: z19.number()
  })
  ).nullish(),
- segments: z18.array(
- z18.object({
- id: z18.number(),
- seek: z18.number(),
- start: z18.number(),
- end: z18.number(),
- text: z18.string(),
- tokens: z18.array(z18.number()),
- temperature: z18.number(),
- avg_logprob: z18.number(),
- compression_ratio: z18.number(),
- no_speech_prob: z18.number()
+ segments: z19.array(
+ z19.object({
+ id: z19.number(),
+ seek: z19.number(),
+ start: z19.number(),
+ end: z19.number(),
+ text: z19.string(),
+ tokens: z19.array(z19.number()),
+ temperature: z19.number(),
+ avg_logprob: z19.number(),
+ compression_ratio: z19.number(),
+ no_speech_prob: z19.number()
  })
  ).nullish()
  });

+ // src/version.ts
+ var VERSION = true ? "2.1.0-beta.10" : "0.0.0-test";
+
  // src/openai-provider.ts
  function createOpenAI(options = {}) {
  var _a, _b;
- const baseURL = (_a = withoutTrailingSlash(options.baseURL)) != null ? _a : "https://api.openai.com/v1";
+ const baseURL = (_a = withoutTrailingSlash(
+ loadOptionalSetting({
+ settingValue: options.baseURL,
+ environmentVariableName: "OPENAI_BASE_URL"
+ })
+ )) != null ? _a : "https://api.openai.com/v1";
  const providerName = (_b = options.name) != null ? _b : "openai";
- const getHeaders = () => ({
- Authorization: `Bearer ${loadApiKey({
- apiKey: options.apiKey,
- environmentVariableName: "OPENAI_API_KEY",
- description: "OpenAI"
- })}`,
- "OpenAI-Organization": options.organization,
- "OpenAI-Project": options.project,
- ...options.headers
- });
+ const getHeaders = () => withUserAgentSuffix(
+ {
+ Authorization: `Bearer ${loadApiKey({
+ apiKey: options.apiKey,
+ environmentVariableName: "OPENAI_API_KEY",
+ description: "OpenAI"
+ })}`,
+ "OpenAI-Organization": options.organization,
+ "OpenAI-Project": options.project,
+ ...options.headers
+ },
+ `ai-sdk/openai/${VERSION}`
+ );
  const createChatModel = (modelId) => new OpenAIChatLanguageModel(modelId, {
  provider: `${providerName}.chat`,
  url: ({ path }) => `${baseURL}${path}`,
@@ -3991,6 +4209,7 @@ function createOpenAI(options = {}) {
  }
  var openai = createOpenAI();
  export {
+ VERSION,
  createOpenAI,
  openai
  };
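
Taken together, the provider changes above mean `createOpenAI` now resolves its base URL through `loadOptionalSetting`, so the `OPENAI_BASE_URL` environment variable is used when no explicit `baseURL` option is passed, and every request's headers get an `ai-sdk/openai/<VERSION>` user-agent suffix via `withUserAgentSuffix`, with `VERSION` also exported from the package. A small illustrative sketch; the proxy URL is a placeholder:

// e.g. OPENAI_BASE_URL=https://my-proxy.example.com/v1 node app.mjs
import { createOpenAI, VERSION } from '@ai-sdk/openai';

const viaEnv = createOpenAI(); // falls back to OPENAI_BASE_URL, then to https://api.openai.com/v1
const explicit = createOpenAI({
  baseURL: 'https://my-proxy.example.com/v1', // an explicit option still takes precedence
});

console.log(VERSION); // "2.1.0-beta.10", also appended to the user-agent header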