@ai-sdk/openai 2.1.0-beta.1 → 2.1.0-beta.11

This diff shows the changes between publicly released versions of the package as they appear in their public registry, and is provided for informational purposes only.
package/dist/index.mjs CHANGED
@@ -1,7 +1,9 @@
1
1
  // src/openai-provider.ts
2
2
  import {
3
3
  loadApiKey,
4
- withoutTrailingSlash
4
+ loadOptionalSetting,
5
+ withoutTrailingSlash,
6
+ withUserAgentSuffix
5
7
  } from "@ai-sdk/provider-utils";
6
8
 
7
9
  // src/chat/openai-chat-language-model.ts
@@ -250,7 +252,7 @@ function mapOpenAIFinishReason(finishReason) {
250
252
 
251
253
  // src/chat/openai-chat-options.ts
252
254
  import { z as z2 } from "zod/v4";
253
- var openaiProviderOptions = z2.object({
255
+ var openaiChatLanguageModelOptions = z2.object({
254
256
  /**
255
257
  * Modify the likelihood of specified tokens appearing in the completion.
256
258
  *
@@ -404,7 +406,7 @@ function prepareChatTools({
404
406
  // src/chat/openai-chat-language-model.ts
405
407
  var OpenAIChatLanguageModel = class {
406
408
  constructor(modelId, config) {
407
- this.specificationVersion = "v2";
409
+ this.specificationVersion = "v3";
408
410
  this.supportedUrls = {
409
411
  "image/*": [/^https?:\/\/.*$/]
410
412
  };
@@ -434,7 +436,7 @@ var OpenAIChatLanguageModel = class {
434
436
  const openaiOptions = (_a = await parseProviderOptions({
435
437
  provider: "openai",
436
438
  providerOptions,
437
- schema: openaiProviderOptions
439
+ schema: openaiChatLanguageModelOptions
438
440
  })) != null ? _a : {};
439
441
  const structuredOutputs = (_b = openaiOptions.structuredOutputs) != null ? _b : true;
440
442
  if (topK != null) {
@@ -1222,7 +1224,7 @@ var openaiCompletionProviderOptions = z4.object({
1222
1224
  // src/completion/openai-completion-language-model.ts
1223
1225
  var OpenAICompletionLanguageModel = class {
1224
1226
  constructor(modelId, config) {
1225
- this.specificationVersion = "v2";
1227
+ this.specificationVersion = "v3";
1226
1228
  this.supportedUrls = {
1227
1229
  // No URLs are supported for completion models.
1228
1230
  };
@@ -1522,7 +1524,7 @@ var openaiEmbeddingProviderOptions = z6.object({
1522
1524
  // src/embedding/openai-embedding-model.ts
1523
1525
  var OpenAIEmbeddingModel = class {
1524
1526
  constructor(modelId, config) {
1525
- this.specificationVersion = "v2";
1527
+ this.specificationVersion = "v3";
1526
1528
  this.maxEmbeddingsPerCall = 2048;
1527
1529
  this.supportsParallelCalls = true;
1528
1530
  this.modelId = modelId;
@@ -1608,7 +1610,7 @@ var OpenAIImageModel = class {
1608
1610
  constructor(modelId, config) {
1609
1611
  this.modelId = modelId;
1610
1612
  this.config = config;
1611
- this.specificationVersion = "v2";
1613
+ this.specificationVersion = "v3";
1612
1614
  }
1613
1615
  get maxImagesPerCall() {
1614
1616
  var _a;
@@ -1776,6 +1778,7 @@ var imageGenerationArgsSchema = z11.object({
1776
1778
  moderation: z11.enum(["auto"]).optional(),
1777
1779
  outputCompression: z11.number().int().min(0).max(100).optional(),
1778
1780
  outputFormat: z11.enum(["png", "jpeg", "webp"]).optional(),
1781
+ partialImages: z11.number().int().min(0).max(3).optional(),
1779
1782
  quality: z11.enum(["auto", "low", "medium", "high"]).optional(),
1780
1783
  size: z11.enum(["1024x1024", "1024x1536", "1536x1024", "auto"]).optional()
1781
1784
  }).strict();
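
Note: the image generation tool gains a `partialImages` option (0-3 partial frames). A minimal usage sketch, assuming a Responses API model with the tool registered under its required name `image_generation`; the model id and prompt are illustrative:

  import { openai } from '@ai-sdk/openai';
  import { streamText } from 'ai';

  const result = streamText({
    model: openai.responses('gpt-5'),      // illustrative model id
    prompt: 'Draw a lighthouse at dusk.',
    tools: {
      // must be named `image_generation` (see the openaiTools docs below)
      image_generation: openai.tools.imageGeneration({
        partialImages: 2,                  // 0-3 partial frames while streaming
        outputFormat: 'webp',
        quality: 'high',
      }),
    },
  });

While streaming, each partial frame surfaces as a preliminary tool result ahead of the final image (see the `response.image_generation_call.partial_image` handling further down in this diff).
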
@@ -1792,39 +1795,62 @@ var imageGeneration = (args = {}) => {
1792
1795
  return imageGenerationToolFactory(args);
1793
1796
  };
1794
1797
 
1798
+ // src/tool/local-shell.ts
1799
+ import { createProviderDefinedToolFactoryWithOutputSchema as createProviderDefinedToolFactoryWithOutputSchema4 } from "@ai-sdk/provider-utils";
1800
+ import { z as z12 } from "zod/v4";
1801
+ var localShellInputSchema = z12.object({
1802
+ action: z12.object({
1803
+ type: z12.literal("exec"),
1804
+ command: z12.array(z12.string()),
1805
+ timeoutMs: z12.number().optional(),
1806
+ user: z12.string().optional(),
1807
+ workingDirectory: z12.string().optional(),
1808
+ env: z12.record(z12.string(), z12.string()).optional()
1809
+ })
1810
+ });
1811
+ var localShellOutputSchema = z12.object({
1812
+ output: z12.string()
1813
+ });
1814
+ var localShell = createProviderDefinedToolFactoryWithOutputSchema4({
1815
+ id: "openai.local_shell",
1816
+ name: "local_shell",
1817
+ inputSchema: localShellInputSchema,
1818
+ outputSchema: localShellOutputSchema
1819
+ });
1820
+
1795
1821
  // src/tool/web-search.ts
1796
1822
  import { createProviderDefinedToolFactory } from "@ai-sdk/provider-utils";
1797
- import { z as z12 } from "zod/v4";
1798
- var webSearchArgsSchema = z12.object({
1799
- filters: z12.object({
1800
- allowedDomains: z12.array(z12.string()).optional()
1823
+ import { z as z13 } from "zod/v4";
1824
+ var webSearchArgsSchema = z13.object({
1825
+ filters: z13.object({
1826
+ allowedDomains: z13.array(z13.string()).optional()
1801
1827
  }).optional(),
1802
- searchContextSize: z12.enum(["low", "medium", "high"]).optional(),
1803
- userLocation: z12.object({
1804
- type: z12.literal("approximate"),
1805
- country: z12.string().optional(),
1806
- city: z12.string().optional(),
1807
- region: z12.string().optional(),
1808
- timezone: z12.string().optional()
1828
+ searchContextSize: z13.enum(["low", "medium", "high"]).optional(),
1829
+ userLocation: z13.object({
1830
+ type: z13.literal("approximate"),
1831
+ country: z13.string().optional(),
1832
+ city: z13.string().optional(),
1833
+ region: z13.string().optional(),
1834
+ timezone: z13.string().optional()
1809
1835
  }).optional()
1810
1836
  });
1811
1837
  var webSearchToolFactory = createProviderDefinedToolFactory({
1812
1838
  id: "openai.web_search",
1813
1839
  name: "web_search",
1814
- inputSchema: z12.object({
1815
- action: z12.discriminatedUnion("type", [
1816
- z12.object({
1817
- type: z12.literal("search"),
1818
- query: z12.string().nullish()
1840
+ inputSchema: z13.object({
1841
+ action: z13.discriminatedUnion("type", [
1842
+ z13.object({
1843
+ type: z13.literal("search"),
1844
+ query: z13.string().nullish()
1819
1845
  }),
1820
- z12.object({
1821
- type: z12.literal("open_page"),
1822
- url: z12.string()
1846
+ z13.object({
1847
+ type: z13.literal("open_page"),
1848
+ url: z13.string()
1823
1849
  }),
1824
- z12.object({
1825
- type: z12.literal("find"),
1826
- url: z12.string(),
1827
- pattern: z12.string()
1850
+ z13.object({
1851
+ type: z13.literal("find"),
1852
+ url: z13.string(),
1853
+ pattern: z13.string()
1828
1854
  })
1829
1855
  ]).nullish()
1830
1856
  })
@@ -1835,58 +1861,58 @@ var webSearch = (args = {}) => {
1835
1861
 
1836
1862
  // src/tool/web-search-preview.ts
1837
1863
  import { createProviderDefinedToolFactory as createProviderDefinedToolFactory2 } from "@ai-sdk/provider-utils";
1838
- import { z as z13 } from "zod/v4";
1839
- var webSearchPreviewArgsSchema = z13.object({
1864
+ import { z as z14 } from "zod/v4";
1865
+ var webSearchPreviewArgsSchema = z14.object({
1840
1866
  /**
1841
1867
  * Search context size to use for the web search.
1842
1868
  * - high: Most comprehensive context, highest cost, slower response
1843
1869
  * - medium: Balanced context, cost, and latency (default)
1844
1870
  * - low: Least context, lowest cost, fastest response
1845
1871
  */
1846
- searchContextSize: z13.enum(["low", "medium", "high"]).optional(),
1872
+ searchContextSize: z14.enum(["low", "medium", "high"]).optional(),
1847
1873
  /**
1848
1874
  * User location information to provide geographically relevant search results.
1849
1875
  */
1850
- userLocation: z13.object({
1876
+ userLocation: z14.object({
1851
1877
  /**
1852
1878
  * Type of location (always 'approximate')
1853
1879
  */
1854
- type: z13.literal("approximate"),
1880
+ type: z14.literal("approximate"),
1855
1881
  /**
1856
1882
  * Two-letter ISO country code (e.g., 'US', 'GB')
1857
1883
  */
1858
- country: z13.string().optional(),
1884
+ country: z14.string().optional(),
1859
1885
  /**
1860
1886
  * City name (free text, e.g., 'Minneapolis')
1861
1887
  */
1862
- city: z13.string().optional(),
1888
+ city: z14.string().optional(),
1863
1889
  /**
1864
1890
  * Region name (free text, e.g., 'Minnesota')
1865
1891
  */
1866
- region: z13.string().optional(),
1892
+ region: z14.string().optional(),
1867
1893
  /**
1868
1894
  * IANA timezone (e.g., 'America/Chicago')
1869
1895
  */
1870
- timezone: z13.string().optional()
1896
+ timezone: z14.string().optional()
1871
1897
  }).optional()
1872
1898
  });
1873
1899
  var webSearchPreview = createProviderDefinedToolFactory2({
1874
1900
  id: "openai.web_search_preview",
1875
1901
  name: "web_search_preview",
1876
- inputSchema: z13.object({
1877
- action: z13.discriminatedUnion("type", [
1878
- z13.object({
1879
- type: z13.literal("search"),
1880
- query: z13.string().nullish()
1902
+ inputSchema: z14.object({
1903
+ action: z14.discriminatedUnion("type", [
1904
+ z14.object({
1905
+ type: z14.literal("search"),
1906
+ query: z14.string().nullish()
1881
1907
  }),
1882
- z13.object({
1883
- type: z13.literal("open_page"),
1884
- url: z13.string()
1908
+ z14.object({
1909
+ type: z14.literal("open_page"),
1910
+ url: z14.string()
1885
1911
  }),
1886
- z13.object({
1887
- type: z13.literal("find"),
1888
- url: z13.string(),
1889
- pattern: z13.string()
1912
+ z14.object({
1913
+ type: z14.literal("find"),
1914
+ url: z14.string(),
1915
+ pattern: z14.string()
1890
1916
  })
1891
1917
  ]).nullish()
1892
1918
  })
@@ -1924,13 +1950,27 @@ var openaiTools = {
1924
1950
  *
1925
1951
  * Must have name `image_generation`.
1926
1952
  *
1927
- * @param size - Image dimensions (e.g., 1024x1024, 1024x1536)
1928
- * @param quality - Rendering quality (e.g. low, medium, high)
1929
- * @param format - File output format
1930
- * @param compression - Compression level (0-100%) for JPEG and WebP formats
1931
- * @param background - Transparent or opaque
1953
+ * @param background - Background type for the generated image. One of 'auto', 'opaque', or 'transparent'.
1954
+ * @param inputFidelity - Input fidelity for the generated image. One of 'low' or 'high'.
1955
+ * @param inputImageMask - Optional mask for inpainting. Contains fileId and/or imageUrl.
1956
+ * @param model - The image generation model to use. Default: gpt-image-1.
1957
+ * @param moderation - Moderation level for the generated image. Default: 'auto'.
1958
+ * @param outputCompression - Compression level for the output image (0-100).
1959
+ * @param outputFormat - The output format of the generated image. One of 'png', 'jpeg', or 'webp'.
1960
+ * @param partialImages - Number of partial images to generate in streaming mode (0-3).
1961
+ * @param quality - The quality of the generated image. One of 'auto', 'low', 'medium', or 'high'.
1962
+ * @param size - The size of the generated image. One of 'auto', '1024x1024', '1024x1536', or '1536x1024'.
1932
1963
  */
1933
1964
  imageGeneration,
1965
+ /**
1966
+ * Local shell is a tool that allows agents to run shell commands locally
1967
+ * on a machine you or the user provides.
1968
+ *
1969
+ * Supported models: `gpt-5-codex` and `codex-mini-latest`
1970
+ *
1971
+ * Must have name `local_shell`.
1972
+ */
1973
+ localShell,
1934
1974
  /**
1935
1975
  * Web search allows models to access up-to-date information from the internet
1936
1976
  * and provide answers with sourced citations.
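
Note: `localShell` is a client-executed tool, so the caller runs the command itself and returns `{ output }` matching `localShellOutputSchema`. A rough sketch, assuming the factory accepts an `execute` handler like other AI SDK tools (that option is an assumption, not shown in this diff); running the command via `node:child_process` is purely illustrative:

  import { openai } from '@ai-sdk/openai';
  import { generateText } from 'ai';
  import { execFile } from 'node:child_process';
  import { promisify } from 'node:util';

  const run = promisify(execFile);

  const result = await generateText({
    model: openai.responses('gpt-5-codex'), // per the docs: gpt-5-codex or codex-mini-latest
    prompt: 'List the files in the current directory.',
    tools: {
      // must be named `local_shell`
      local_shell: openai.tools.localShell({
        // assumption: execute receives the parsed localShellInputSchema value
        execute: async ({ action }) => {
          const [cmd, ...args] = action.command;
          const { stdout, stderr } = await run(cmd, args, {
            cwd: action.workingDirectory,
            timeout: action.timeoutMs,
          });
          return { output: stdout || stderr };
        },
      }),
    },
  });
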
@@ -1968,14 +2008,14 @@ import {
1968
2008
  parseProviderOptions as parseProviderOptions5,
1969
2009
  postJsonToApi as postJsonToApi5
1970
2010
  } from "@ai-sdk/provider-utils";
1971
- import { z as z15 } from "zod/v4";
2011
+ import { z as z16 } from "zod/v4";
1972
2012
 
1973
2013
  // src/responses/convert-to-openai-responses-input.ts
1974
2014
  import {
1975
2015
  UnsupportedFunctionalityError as UnsupportedFunctionalityError4
1976
2016
  } from "@ai-sdk/provider";
1977
2017
  import { convertToBase64 as convertToBase642, parseProviderOptions as parseProviderOptions4 } from "@ai-sdk/provider-utils";
1978
- import { z as z14 } from "zod/v4";
2018
+ import { z as z15 } from "zod/v4";
1979
2019
  function isFileId(data, prefixes) {
1980
2020
  if (!prefixes) return false;
1981
2021
  return prefixes.some((prefix) => data.startsWith(prefix));
@@ -1984,9 +2024,10 @@ async function convertToOpenAIResponsesInput({
1984
2024
  prompt,
1985
2025
  systemMessageMode,
1986
2026
  fileIdPrefixes,
1987
- store
2027
+ store,
2028
+ hasLocalShellTool = false
1988
2029
  }) {
1989
- var _a, _b, _c, _d, _e, _f;
2030
+ var _a, _b, _c, _d, _e, _f, _g, _h, _i;
1990
2031
  const input = [];
1991
2032
  const warnings = [];
1992
2033
  for (const { role, content } of prompt) {
@@ -2079,12 +2120,29 @@ async function convertToOpenAIResponsesInput({
2079
2120
  if (part.providerExecuted) {
2080
2121
  break;
2081
2122
  }
2123
+ if (hasLocalShellTool && part.toolName === "local_shell") {
2124
+ const parsedInput = localShellInputSchema.parse(part.input);
2125
+ input.push({
2126
+ type: "local_shell_call",
2127
+ call_id: part.toolCallId,
2128
+ id: (_f = (_e = (_d = part.providerOptions) == null ? void 0 : _d.openai) == null ? void 0 : _e.itemId) != null ? _f : void 0,
2129
+ action: {
2130
+ type: "exec",
2131
+ command: parsedInput.action.command,
2132
+ timeout_ms: parsedInput.action.timeoutMs,
2133
+ user: parsedInput.action.user,
2134
+ working_directory: parsedInput.action.workingDirectory,
2135
+ env: parsedInput.action.env
2136
+ }
2137
+ });
2138
+ break;
2139
+ }
2082
2140
  input.push({
2083
2141
  type: "function_call",
2084
2142
  call_id: part.toolCallId,
2085
2143
  name: part.toolName,
2086
2144
  arguments: JSON.stringify(part.input),
2087
- id: (_f = (_e = (_d = part.providerOptions) == null ? void 0 : _d.openai) == null ? void 0 : _e.itemId) != null ? _f : void 0
2145
+ id: (_i = (_h = (_g = part.providerOptions) == null ? void 0 : _g.openai) == null ? void 0 : _h.itemId) != null ? _i : void 0
2088
2146
  });
2089
2147
  break;
2090
2148
  }
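
Note: the mapping above replays an earlier `local_shell` tool call to the Responses API as a `local_shell_call` input item, converting the camelCase tool input into the API's snake_case fields. Roughly (values illustrative):

  {
    type: 'local_shell_call',
    call_id: 'call_123',
    id: 'lsh_456',               // from providerOptions.openai.itemId, when present
    action: {
      type: 'exec',
      command: ['ls', '-la'],
      timeout_ms: 5000,          // timeoutMs
      working_directory: '/tmp', // workingDirectory
      env: { PATH: '/usr/bin' },
    },
  }

The matching tool result is sent back as `{ type: 'local_shell_call_output', call_id, output }`, as added in the tool-message branch in a later hunk of this function.
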
@@ -2108,26 +2166,40 @@ async function convertToOpenAIResponsesInput({
2108
2166
  });
2109
2167
  const reasoningId = providerOptions == null ? void 0 : providerOptions.itemId;
2110
2168
  if (reasoningId != null) {
2111
- const existingReasoningMessage = reasoningMessages[reasoningId];
2112
- const summaryParts = [];
2113
- if (part.text.length > 0) {
2114
- summaryParts.push({ type: "summary_text", text: part.text });
2115
- } else if (existingReasoningMessage !== void 0) {
2116
- warnings.push({
2117
- type: "other",
2118
- message: `Cannot append empty reasoning part to existing reasoning sequence. Skipping reasoning part: ${JSON.stringify(part)}.`
2119
- });
2120
- }
2121
- if (existingReasoningMessage === void 0) {
2122
- reasoningMessages[reasoningId] = {
2123
- type: "reasoning",
2124
- id: reasoningId,
2125
- encrypted_content: providerOptions == null ? void 0 : providerOptions.reasoningEncryptedContent,
2126
- summary: summaryParts
2127
- };
2128
- input.push(reasoningMessages[reasoningId]);
2169
+ const reasoningMessage = reasoningMessages[reasoningId];
2170
+ if (store) {
2171
+ if (reasoningMessage === void 0) {
2172
+ input.push({ type: "item_reference", id: reasoningId });
2173
+ reasoningMessages[reasoningId] = {
2174
+ type: "reasoning",
2175
+ id: reasoningId,
2176
+ summary: []
2177
+ };
2178
+ }
2129
2179
  } else {
2130
- existingReasoningMessage.summary.push(...summaryParts);
2180
+ const summaryParts = [];
2181
+ if (part.text.length > 0) {
2182
+ summaryParts.push({
2183
+ type: "summary_text",
2184
+ text: part.text
2185
+ });
2186
+ } else if (reasoningMessage !== void 0) {
2187
+ warnings.push({
2188
+ type: "other",
2189
+ message: `Cannot append empty reasoning part to existing reasoning sequence. Skipping reasoning part: ${JSON.stringify(part)}.`
2190
+ });
2191
+ }
2192
+ if (reasoningMessage === void 0) {
2193
+ reasoningMessages[reasoningId] = {
2194
+ type: "reasoning",
2195
+ id: reasoningId,
2196
+ encrypted_content: providerOptions == null ? void 0 : providerOptions.reasoningEncryptedContent,
2197
+ summary: summaryParts
2198
+ };
2199
+ input.push(reasoningMessages[reasoningId]);
2200
+ } else {
2201
+ reasoningMessage.summary.push(...summaryParts);
2202
+ }
2131
2203
  }
2132
2204
  } else {
2133
2205
  warnings.push({
@@ -2144,6 +2216,14 @@ async function convertToOpenAIResponsesInput({
2144
2216
  case "tool": {
2145
2217
  for (const part of content) {
2146
2218
  const output = part.output;
2219
+ if (hasLocalShellTool && part.toolName === "local_shell" && output.type === "json") {
2220
+ input.push({
2221
+ type: "local_shell_call_output",
2222
+ call_id: part.toolCallId,
2223
+ output: localShellOutputSchema.parse(output.value).output
2224
+ });
2225
+ break;
2226
+ }
2147
2227
  let contentValue;
2148
2228
  switch (output.type) {
2149
2229
  case "text":
@@ -2172,9 +2252,9 @@ async function convertToOpenAIResponsesInput({
2172
2252
  }
2173
2253
  return { input, warnings };
2174
2254
  }
2175
- var openaiResponsesReasoningProviderOptionsSchema = z14.object({
2176
- itemId: z14.string().nullish(),
2177
- reasoningEncryptedContent: z14.string().nullish()
2255
+ var openaiResponsesReasoningProviderOptionsSchema = z15.object({
2256
+ itemId: z15.string().nullish(),
2257
+ reasoningEncryptedContent: z15.string().nullish()
2178
2258
  });
2179
2259
 
2180
2260
  // src/responses/map-openai-responses-finish-reason.ts
@@ -2237,6 +2317,12 @@ function prepareResponsesTools({
2237
2317
  });
2238
2318
  break;
2239
2319
  }
2320
+ case "openai.local_shell": {
2321
+ openaiTools2.push({
2322
+ type: "local_shell"
2323
+ });
2324
+ break;
2325
+ }
2240
2326
  case "openai.web_search_preview": {
2241
2327
  const args = webSearchPreviewArgsSchema.parse(tool.args);
2242
2328
  openaiTools2.push({
@@ -2275,11 +2361,12 @@ function prepareResponsesTools({
2275
2361
  image_url: args.inputImageMask.imageUrl
2276
2362
  } : void 0,
2277
2363
  model: args.model,
2278
- size: args.size,
2279
- quality: args.quality,
2280
2364
  moderation: args.moderation,
2365
+ partial_images: args.partialImages,
2366
+ quality: args.quality,
2367
+ output_compression: args.outputCompression,
2281
2368
  output_format: args.outputFormat,
2282
- output_compression: args.outputCompression
2369
+ size: args.size
2283
2370
  });
2284
2371
  break;
2285
2372
  }
@@ -2316,73 +2403,86 @@ function prepareResponsesTools({
2316
2403
  }
2317
2404
 
2318
2405
  // src/responses/openai-responses-language-model.ts
2319
- var webSearchCallItem = z15.object({
2320
- type: z15.literal("web_search_call"),
2321
- id: z15.string(),
2322
- status: z15.string(),
2323
- action: z15.discriminatedUnion("type", [
2324
- z15.object({
2325
- type: z15.literal("search"),
2326
- query: z15.string().nullish()
2406
+ var webSearchCallItem = z16.object({
2407
+ type: z16.literal("web_search_call"),
2408
+ id: z16.string(),
2409
+ status: z16.string(),
2410
+ action: z16.discriminatedUnion("type", [
2411
+ z16.object({
2412
+ type: z16.literal("search"),
2413
+ query: z16.string().nullish()
2327
2414
  }),
2328
- z15.object({
2329
- type: z15.literal("open_page"),
2330
- url: z15.string()
2415
+ z16.object({
2416
+ type: z16.literal("open_page"),
2417
+ url: z16.string()
2331
2418
  }),
2332
- z15.object({
2333
- type: z15.literal("find"),
2334
- url: z15.string(),
2335
- pattern: z15.string()
2419
+ z16.object({
2420
+ type: z16.literal("find"),
2421
+ url: z16.string(),
2422
+ pattern: z16.string()
2336
2423
  })
2337
2424
  ]).nullish()
2338
2425
  });
2339
- var fileSearchCallItem = z15.object({
2340
- type: z15.literal("file_search_call"),
2341
- id: z15.string(),
2342
- queries: z15.array(z15.string()),
2343
- results: z15.array(
2344
- z15.object({
2345
- attributes: z15.record(z15.string(), z15.unknown()),
2346
- file_id: z15.string(),
2347
- filename: z15.string(),
2348
- score: z15.number(),
2349
- text: z15.string()
2426
+ var fileSearchCallItem = z16.object({
2427
+ type: z16.literal("file_search_call"),
2428
+ id: z16.string(),
2429
+ queries: z16.array(z16.string()),
2430
+ results: z16.array(
2431
+ z16.object({
2432
+ attributes: z16.record(z16.string(), z16.unknown()),
2433
+ file_id: z16.string(),
2434
+ filename: z16.string(),
2435
+ score: z16.number(),
2436
+ text: z16.string()
2350
2437
  })
2351
2438
  ).nullish()
2352
2439
  });
2353
- var codeInterpreterCallItem = z15.object({
2354
- type: z15.literal("code_interpreter_call"),
2355
- id: z15.string(),
2356
- code: z15.string().nullable(),
2357
- container_id: z15.string(),
2358
- outputs: z15.array(
2359
- z15.discriminatedUnion("type", [
2360
- z15.object({ type: z15.literal("logs"), logs: z15.string() }),
2361
- z15.object({ type: z15.literal("image"), url: z15.string() })
2440
+ var codeInterpreterCallItem = z16.object({
2441
+ type: z16.literal("code_interpreter_call"),
2442
+ id: z16.string(),
2443
+ code: z16.string().nullable(),
2444
+ container_id: z16.string(),
2445
+ outputs: z16.array(
2446
+ z16.discriminatedUnion("type", [
2447
+ z16.object({ type: z16.literal("logs"), logs: z16.string() }),
2448
+ z16.object({ type: z16.literal("image"), url: z16.string() })
2362
2449
  ])
2363
2450
  ).nullable()
2364
2451
  });
2365
- var imageGenerationCallItem = z15.object({
2366
- type: z15.literal("image_generation_call"),
2367
- id: z15.string(),
2368
- result: z15.string()
2452
+ var localShellCallItem = z16.object({
2453
+ type: z16.literal("local_shell_call"),
2454
+ id: z16.string(),
2455
+ call_id: z16.string(),
2456
+ action: z16.object({
2457
+ type: z16.literal("exec"),
2458
+ command: z16.array(z16.string()),
2459
+ timeout_ms: z16.number().optional(),
2460
+ user: z16.string().optional(),
2461
+ working_directory: z16.string().optional(),
2462
+ env: z16.record(z16.string(), z16.string()).optional()
2463
+ })
2464
+ });
2465
+ var imageGenerationCallItem = z16.object({
2466
+ type: z16.literal("image_generation_call"),
2467
+ id: z16.string(),
2468
+ result: z16.string()
2369
2469
  });
2370
2470
  var TOP_LOGPROBS_MAX = 20;
2371
- var LOGPROBS_SCHEMA = z15.array(
2372
- z15.object({
2373
- token: z15.string(),
2374
- logprob: z15.number(),
2375
- top_logprobs: z15.array(
2376
- z15.object({
2377
- token: z15.string(),
2378
- logprob: z15.number()
2471
+ var LOGPROBS_SCHEMA = z16.array(
2472
+ z16.object({
2473
+ token: z16.string(),
2474
+ logprob: z16.number(),
2475
+ top_logprobs: z16.array(
2476
+ z16.object({
2477
+ token: z16.string(),
2478
+ logprob: z16.number()
2379
2479
  })
2380
2480
  )
2381
2481
  })
2382
2482
  );
2383
2483
  var OpenAIResponsesLanguageModel = class {
2384
2484
  constructor(modelId, config) {
2385
- this.specificationVersion = "v2";
2485
+ this.specificationVersion = "v3";
2386
2486
  this.supportedUrls = {
2387
2487
  "image/*": [/^https?:\/\/.*$/],
2388
2488
  "application/pdf": [/^https?:\/\/.*$/]
@@ -2441,7 +2541,8 @@ var OpenAIResponsesLanguageModel = class {
2441
2541
  prompt,
2442
2542
  systemMessageMode: modelConfig.systemMessageMode,
2443
2543
  fileIdPrefixes: this.config.fileIdPrefixes,
2444
- store: (_a = openaiOptions == null ? void 0 : openaiOptions.store) != null ? _a : true
2544
+ store: (_a = openaiOptions == null ? void 0 : openaiOptions.store) != null ? _a : true,
2545
+ hasLocalShellTool: hasOpenAITool("openai.local_shell")
2445
2546
  });
2446
2547
  warnings.push(...inputWarnings);
2447
2548
  const strictJsonSchema = (_b = openaiOptions == null ? void 0 : openaiOptions.strictJsonSchema) != null ? _b : false;
@@ -2606,45 +2707,45 @@ var OpenAIResponsesLanguageModel = class {
2606
2707
  body,
2607
2708
  failedResponseHandler: openaiFailedResponseHandler,
2608
2709
  successfulResponseHandler: createJsonResponseHandler5(
2609
- z15.object({
2610
- id: z15.string(),
2611
- created_at: z15.number(),
2612
- error: z15.object({
2613
- code: z15.string(),
2614
- message: z15.string()
2710
+ z16.object({
2711
+ id: z16.string(),
2712
+ created_at: z16.number(),
2713
+ error: z16.object({
2714
+ code: z16.string(),
2715
+ message: z16.string()
2615
2716
  }).nullish(),
2616
- model: z15.string(),
2617
- output: z15.array(
2618
- z15.discriminatedUnion("type", [
2619
- z15.object({
2620
- type: z15.literal("message"),
2621
- role: z15.literal("assistant"),
2622
- id: z15.string(),
2623
- content: z15.array(
2624
- z15.object({
2625
- type: z15.literal("output_text"),
2626
- text: z15.string(),
2717
+ model: z16.string(),
2718
+ output: z16.array(
2719
+ z16.discriminatedUnion("type", [
2720
+ z16.object({
2721
+ type: z16.literal("message"),
2722
+ role: z16.literal("assistant"),
2723
+ id: z16.string(),
2724
+ content: z16.array(
2725
+ z16.object({
2726
+ type: z16.literal("output_text"),
2727
+ text: z16.string(),
2627
2728
  logprobs: LOGPROBS_SCHEMA.nullish(),
2628
- annotations: z15.array(
2629
- z15.discriminatedUnion("type", [
2630
- z15.object({
2631
- type: z15.literal("url_citation"),
2632
- start_index: z15.number(),
2633
- end_index: z15.number(),
2634
- url: z15.string(),
2635
- title: z15.string()
2729
+ annotations: z16.array(
2730
+ z16.discriminatedUnion("type", [
2731
+ z16.object({
2732
+ type: z16.literal("url_citation"),
2733
+ start_index: z16.number(),
2734
+ end_index: z16.number(),
2735
+ url: z16.string(),
2736
+ title: z16.string()
2636
2737
  }),
2637
- z15.object({
2638
- type: z15.literal("file_citation"),
2639
- file_id: z15.string(),
2640
- filename: z15.string().nullish(),
2641
- index: z15.number().nullish(),
2642
- start_index: z15.number().nullish(),
2643
- end_index: z15.number().nullish(),
2644
- quote: z15.string().nullish()
2738
+ z16.object({
2739
+ type: z16.literal("file_citation"),
2740
+ file_id: z16.string(),
2741
+ filename: z16.string().nullish(),
2742
+ index: z16.number().nullish(),
2743
+ start_index: z16.number().nullish(),
2744
+ end_index: z16.number().nullish(),
2745
+ quote: z16.string().nullish()
2645
2746
  }),
2646
- z15.object({
2647
- type: z15.literal("container_file_citation")
2747
+ z16.object({
2748
+ type: z16.literal("container_file_citation")
2648
2749
  })
2649
2750
  ])
2650
2751
  )
@@ -2655,33 +2756,34 @@ var OpenAIResponsesLanguageModel = class {
2655
2756
  fileSearchCallItem,
2656
2757
  codeInterpreterCallItem,
2657
2758
  imageGenerationCallItem,
2658
- z15.object({
2659
- type: z15.literal("function_call"),
2660
- call_id: z15.string(),
2661
- name: z15.string(),
2662
- arguments: z15.string(),
2663
- id: z15.string()
2759
+ localShellCallItem,
2760
+ z16.object({
2761
+ type: z16.literal("function_call"),
2762
+ call_id: z16.string(),
2763
+ name: z16.string(),
2764
+ arguments: z16.string(),
2765
+ id: z16.string()
2664
2766
  }),
2665
- z15.object({
2666
- type: z15.literal("computer_call"),
2667
- id: z15.string(),
2668
- status: z15.string().optional()
2767
+ z16.object({
2768
+ type: z16.literal("computer_call"),
2769
+ id: z16.string(),
2770
+ status: z16.string().optional()
2669
2771
  }),
2670
- z15.object({
2671
- type: z15.literal("reasoning"),
2672
- id: z15.string(),
2673
- encrypted_content: z15.string().nullish(),
2674
- summary: z15.array(
2675
- z15.object({
2676
- type: z15.literal("summary_text"),
2677
- text: z15.string()
2772
+ z16.object({
2773
+ type: z16.literal("reasoning"),
2774
+ id: z16.string(),
2775
+ encrypted_content: z16.string().nullish(),
2776
+ summary: z16.array(
2777
+ z16.object({
2778
+ type: z16.literal("summary_text"),
2779
+ text: z16.string()
2678
2780
  })
2679
2781
  )
2680
2782
  })
2681
2783
  ])
2682
2784
  ),
2683
- service_tier: z15.string().nullish(),
2684
- incomplete_details: z15.object({ reason: z15.string() }).nullable(),
2785
+ service_tier: z16.string().nullish(),
2786
+ incomplete_details: z16.object({ reason: z16.string() }).nullish(),
2685
2787
  usage: usageSchema2
2686
2788
  })
2687
2789
  ),
@@ -2741,6 +2843,20 @@ var OpenAIResponsesLanguageModel = class {
2741
2843
  });
2742
2844
  break;
2743
2845
  }
2846
+ case "local_shell_call": {
2847
+ content.push({
2848
+ type: "tool-call",
2849
+ toolCallId: part.call_id,
2850
+ toolName: "local_shell",
2851
+ input: JSON.stringify({ action: part.action }),
2852
+ providerMetadata: {
2853
+ openai: {
2854
+ itemId: part.id
2855
+ }
2856
+ }
2857
+ });
2858
+ break;
2859
+ }
2744
2860
  case "message": {
2745
2861
  for (const contentPart of part.content) {
2746
2862
  if (((_c = (_b = options.providerOptions) == null ? void 0 : _b.openai) == null ? void 0 : _c.logprobs) && contentPart.logprobs) {
@@ -2998,6 +3114,24 @@ var OpenAIResponsesLanguageModel = class {
2998
3114
  id: value.item.id,
2999
3115
  toolName: "computer_use"
3000
3116
  });
3117
+ } else if (value.item.type === "code_interpreter_call") {
3118
+ ongoingToolCalls[value.output_index] = {
3119
+ toolName: "code_interpreter",
3120
+ toolCallId: value.item.id,
3121
+ codeInterpreter: {
3122
+ containerId: value.item.container_id
3123
+ }
3124
+ };
3125
+ controller.enqueue({
3126
+ type: "tool-input-start",
3127
+ id: value.item.id,
3128
+ toolName: "code_interpreter"
3129
+ });
3130
+ controller.enqueue({
3131
+ type: "tool-input-delta",
3132
+ id: value.item.id,
3133
+ delta: `{"containerId":"${value.item.container_id}","code":"`
3134
+ });
3001
3135
  } else if (value.item.type === "file_search_call") {
3002
3136
  controller.enqueue({
3003
3137
  type: "tool-call",
@@ -3121,16 +3255,7 @@ var OpenAIResponsesLanguageModel = class {
3121
3255
  providerExecuted: true
3122
3256
  });
3123
3257
  } else if (value.item.type === "code_interpreter_call") {
3124
- controller.enqueue({
3125
- type: "tool-call",
3126
- toolCallId: value.item.id,
3127
- toolName: "code_interpreter",
3128
- input: JSON.stringify({
3129
- code: value.item.code,
3130
- containerId: value.item.container_id
3131
- }),
3132
- providerExecuted: true
3133
- });
3258
+ ongoingToolCalls[value.output_index] = void 0;
3134
3259
  controller.enqueue({
3135
3260
  type: "tool-result",
3136
3261
  toolCallId: value.item.id,
@@ -3150,6 +3275,26 @@ var OpenAIResponsesLanguageModel = class {
3150
3275
  },
3151
3276
  providerExecuted: true
3152
3277
  });
3278
+ } else if (value.item.type === "local_shell_call") {
3279
+ ongoingToolCalls[value.output_index] = void 0;
3280
+ controller.enqueue({
3281
+ type: "tool-call",
3282
+ toolCallId: value.item.call_id,
3283
+ toolName: "local_shell",
3284
+ input: JSON.stringify({
3285
+ action: {
3286
+ type: "exec",
3287
+ command: value.item.action.command,
3288
+ timeoutMs: value.item.action.timeout_ms,
3289
+ user: value.item.action.user,
3290
+ workingDirectory: value.item.action.working_directory,
3291
+ env: value.item.action.env
3292
+ }
3293
+ }),
3294
+ providerMetadata: {
3295
+ openai: { itemId: value.item.id }
3296
+ }
3297
+ });
3153
3298
  } else if (value.item.type === "message") {
3154
3299
  controller.enqueue({
3155
3300
  type: "text-end",
@@ -3180,6 +3325,51 @@ var OpenAIResponsesLanguageModel = class {
3180
3325
  delta: value.delta
3181
3326
  });
3182
3327
  }
3328
+ } else if (isResponseImageGenerationCallPartialImageChunk(value)) {
3329
+ controller.enqueue({
3330
+ type: "tool-result",
3331
+ toolCallId: value.item_id,
3332
+ toolName: "image_generation",
3333
+ result: {
3334
+ result: value.partial_image_b64
3335
+ },
3336
+ providerExecuted: true,
3337
+ preliminary: true
3338
+ });
3339
+ } else if (isResponseCodeInterpreterCallCodeDeltaChunk(value)) {
3340
+ const toolCall = ongoingToolCalls[value.output_index];
3341
+ if (toolCall != null) {
3342
+ controller.enqueue({
3343
+ type: "tool-input-delta",
3344
+ id: toolCall.toolCallId,
3345
+ // The delta is code, which is embedding in a JSON string.
3346
+ // To escape it, we use JSON.stringify and slice to remove the outer quotes.
3347
+ delta: JSON.stringify(value.delta).slice(1, -1)
3348
+ });
3349
+ }
3350
+ } else if (isResponseCodeInterpreterCallCodeDoneChunk(value)) {
3351
+ const toolCall = ongoingToolCalls[value.output_index];
3352
+ if (toolCall != null) {
3353
+ controller.enqueue({
3354
+ type: "tool-input-delta",
3355
+ id: toolCall.toolCallId,
3356
+ delta: '"}'
3357
+ });
3358
+ controller.enqueue({
3359
+ type: "tool-input-end",
3360
+ id: toolCall.toolCallId
3361
+ });
3362
+ controller.enqueue({
3363
+ type: "tool-call",
3364
+ toolCallId: toolCall.toolCallId,
3365
+ toolName: "code_interpreter",
3366
+ input: JSON.stringify({
3367
+ code: value.code,
3368
+ containerId: toolCall.codeInterpreter.containerId
3369
+ }),
3370
+ providerExecuted: true
3371
+ });
3372
+ }
3183
3373
  } else if (isResponseCreatedChunk(value)) {
3184
3374
  responseId = value.response.id;
3185
3375
  controller.enqueue({
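
Note: code interpreter calls are now streamed as incremental tool input. `response.output_item.added` opens the input with a `{"containerId":"...","code":"` prefix, each `response.code_interpreter_call_code.delta` appends the code JSON-escaped, and the `.done` chunk closes the string and emits the final `tool-call`. The fragments reassemble into the same JSON the non-streaming path produces (values illustrative):

  const prefix = '{"containerId":"cntr_1","code":"'; // emitted right after tool-input-start
  const codeDeltas = ['print(', '1 + 1', ')'];       // response.code_interpreter_call_code.delta values
  const suffix = '"}';                                // emitted by the .done handler

  const input =
    prefix +
    codeDeltas.map((d) => JSON.stringify(d).slice(1, -1)).join('') +
    suffix;

  // JSON.parse(input) -> { containerId: 'cntr_1', code: 'print(1 + 1)' }

Partial image frames from `response.image_generation_call.partial_image` are likewise forwarded as `tool-result` parts flagged `preliminary: true`, ahead of the final image result.
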
@@ -3286,166 +3476,201 @@ var OpenAIResponsesLanguageModel = class {
3286
3476
  };
3287
3477
  }
3288
3478
  };
3289
- var usageSchema2 = z15.object({
3290
- input_tokens: z15.number(),
3291
- input_tokens_details: z15.object({ cached_tokens: z15.number().nullish() }).nullish(),
3292
- output_tokens: z15.number(),
3293
- output_tokens_details: z15.object({ reasoning_tokens: z15.number().nullish() }).nullish()
3479
+ var usageSchema2 = z16.object({
3480
+ input_tokens: z16.number(),
3481
+ input_tokens_details: z16.object({ cached_tokens: z16.number().nullish() }).nullish(),
3482
+ output_tokens: z16.number(),
3483
+ output_tokens_details: z16.object({ reasoning_tokens: z16.number().nullish() }).nullish()
3294
3484
  });
3295
- var textDeltaChunkSchema = z15.object({
3296
- type: z15.literal("response.output_text.delta"),
3297
- item_id: z15.string(),
3298
- delta: z15.string(),
3485
+ var textDeltaChunkSchema = z16.object({
3486
+ type: z16.literal("response.output_text.delta"),
3487
+ item_id: z16.string(),
3488
+ delta: z16.string(),
3299
3489
  logprobs: LOGPROBS_SCHEMA.nullish()
3300
3490
  });
3301
- var errorChunkSchema = z15.object({
3302
- type: z15.literal("error"),
3303
- code: z15.string(),
3304
- message: z15.string(),
3305
- param: z15.string().nullish(),
3306
- sequence_number: z15.number()
3491
+ var errorChunkSchema = z16.object({
3492
+ type: z16.literal("error"),
3493
+ code: z16.string(),
3494
+ message: z16.string(),
3495
+ param: z16.string().nullish(),
3496
+ sequence_number: z16.number()
3307
3497
  });
3308
- var responseFinishedChunkSchema = z15.object({
3309
- type: z15.enum(["response.completed", "response.incomplete"]),
3310
- response: z15.object({
3311
- incomplete_details: z15.object({ reason: z15.string() }).nullish(),
3498
+ var responseFinishedChunkSchema = z16.object({
3499
+ type: z16.enum(["response.completed", "response.incomplete"]),
3500
+ response: z16.object({
3501
+ incomplete_details: z16.object({ reason: z16.string() }).nullish(),
3312
3502
  usage: usageSchema2,
3313
- service_tier: z15.string().nullish()
3503
+ service_tier: z16.string().nullish()
3314
3504
  })
3315
3505
  });
3316
- var responseCreatedChunkSchema = z15.object({
3317
- type: z15.literal("response.created"),
3318
- response: z15.object({
3319
- id: z15.string(),
3320
- created_at: z15.number(),
3321
- model: z15.string(),
3322
- service_tier: z15.string().nullish()
3506
+ var responseCreatedChunkSchema = z16.object({
3507
+ type: z16.literal("response.created"),
3508
+ response: z16.object({
3509
+ id: z16.string(),
3510
+ created_at: z16.number(),
3511
+ model: z16.string(),
3512
+ service_tier: z16.string().nullish()
3323
3513
  })
3324
3514
  });
3325
- var responseOutputItemAddedSchema = z15.object({
3326
- type: z15.literal("response.output_item.added"),
3327
- output_index: z15.number(),
3328
- item: z15.discriminatedUnion("type", [
3329
- z15.object({
3330
- type: z15.literal("message"),
3331
- id: z15.string()
3515
+ var responseOutputItemAddedSchema = z16.object({
3516
+ type: z16.literal("response.output_item.added"),
3517
+ output_index: z16.number(),
3518
+ item: z16.discriminatedUnion("type", [
3519
+ z16.object({
3520
+ type: z16.literal("message"),
3521
+ id: z16.string()
3332
3522
  }),
3333
- z15.object({
3334
- type: z15.literal("reasoning"),
3335
- id: z15.string(),
3336
- encrypted_content: z15.string().nullish()
3523
+ z16.object({
3524
+ type: z16.literal("reasoning"),
3525
+ id: z16.string(),
3526
+ encrypted_content: z16.string().nullish()
3337
3527
  }),
3338
- z15.object({
3339
- type: z15.literal("function_call"),
3340
- id: z15.string(),
3341
- call_id: z15.string(),
3342
- name: z15.string(),
3343
- arguments: z15.string()
3528
+ z16.object({
3529
+ type: z16.literal("function_call"),
3530
+ id: z16.string(),
3531
+ call_id: z16.string(),
3532
+ name: z16.string(),
3533
+ arguments: z16.string()
3344
3534
  }),
3345
- z15.object({
3346
- type: z15.literal("web_search_call"),
3347
- id: z15.string(),
3348
- status: z15.string(),
3349
- action: z15.object({
3350
- type: z15.literal("search"),
3351
- query: z15.string().optional()
3535
+ z16.object({
3536
+ type: z16.literal("web_search_call"),
3537
+ id: z16.string(),
3538
+ status: z16.string(),
3539
+ action: z16.object({
3540
+ type: z16.literal("search"),
3541
+ query: z16.string().optional()
3352
3542
  }).nullish()
3353
3543
  }),
3354
- z15.object({
3355
- type: z15.literal("computer_call"),
3356
- id: z15.string(),
3357
- status: z15.string()
3544
+ z16.object({
3545
+ type: z16.literal("computer_call"),
3546
+ id: z16.string(),
3547
+ status: z16.string()
3548
+ }),
3549
+ z16.object({
3550
+ type: z16.literal("file_search_call"),
3551
+ id: z16.string()
3358
3552
  }),
3359
- z15.object({
3360
- type: z15.literal("file_search_call"),
3361
- id: z15.string()
3553
+ z16.object({
3554
+ type: z16.literal("image_generation_call"),
3555
+ id: z16.string()
3362
3556
  }),
3363
- z15.object({
3364
- type: z15.literal("image_generation_call"),
3365
- id: z15.string()
3557
+ z16.object({
3558
+ type: z16.literal("code_interpreter_call"),
3559
+ id: z16.string(),
3560
+ container_id: z16.string(),
3561
+ code: z16.string().nullable(),
3562
+ outputs: z16.array(
3563
+ z16.discriminatedUnion("type", [
3564
+ z16.object({ type: z16.literal("logs"), logs: z16.string() }),
3565
+ z16.object({ type: z16.literal("image"), url: z16.string() })
3566
+ ])
3567
+ ).nullable(),
3568
+ status: z16.string()
3366
3569
  })
3367
3570
  ])
3368
3571
  });
3369
- var responseOutputItemDoneSchema = z15.object({
3370
- type: z15.literal("response.output_item.done"),
3371
- output_index: z15.number(),
3372
- item: z15.discriminatedUnion("type", [
3373
- z15.object({
3374
- type: z15.literal("message"),
3375
- id: z15.string()
3572
+ var responseOutputItemDoneSchema = z16.object({
3573
+ type: z16.literal("response.output_item.done"),
3574
+ output_index: z16.number(),
3575
+ item: z16.discriminatedUnion("type", [
3576
+ z16.object({
3577
+ type: z16.literal("message"),
3578
+ id: z16.string()
3376
3579
  }),
3377
- z15.object({
3378
- type: z15.literal("reasoning"),
3379
- id: z15.string(),
3380
- encrypted_content: z15.string().nullish()
3580
+ z16.object({
3581
+ type: z16.literal("reasoning"),
3582
+ id: z16.string(),
3583
+ encrypted_content: z16.string().nullish()
3381
3584
  }),
3382
- z15.object({
3383
- type: z15.literal("function_call"),
3384
- id: z15.string(),
3385
- call_id: z15.string(),
3386
- name: z15.string(),
3387
- arguments: z15.string(),
3388
- status: z15.literal("completed")
3585
+ z16.object({
3586
+ type: z16.literal("function_call"),
3587
+ id: z16.string(),
3588
+ call_id: z16.string(),
3589
+ name: z16.string(),
3590
+ arguments: z16.string(),
3591
+ status: z16.literal("completed")
3389
3592
  }),
3390
3593
  codeInterpreterCallItem,
3391
3594
  imageGenerationCallItem,
3392
3595
  webSearchCallItem,
3393
3596
  fileSearchCallItem,
3394
- z15.object({
3395
- type: z15.literal("computer_call"),
3396
- id: z15.string(),
3397
- status: z15.literal("completed")
3597
+ localShellCallItem,
3598
+ z16.object({
3599
+ type: z16.literal("computer_call"),
3600
+ id: z16.string(),
3601
+ status: z16.literal("completed")
3398
3602
  })
3399
3603
  ])
3400
3604
  });
3401
- var responseFunctionCallArgumentsDeltaSchema = z15.object({
3402
- type: z15.literal("response.function_call_arguments.delta"),
3403
- item_id: z15.string(),
3404
- output_index: z15.number(),
3405
- delta: z15.string()
3605
+ var responseFunctionCallArgumentsDeltaSchema = z16.object({
3606
+ type: z16.literal("response.function_call_arguments.delta"),
3607
+ item_id: z16.string(),
3608
+ output_index: z16.number(),
3609
+ delta: z16.string()
3406
3610
  });
3407
- var responseAnnotationAddedSchema = z15.object({
3408
- type: z15.literal("response.output_text.annotation.added"),
3409
- annotation: z15.discriminatedUnion("type", [
3410
- z15.object({
3411
- type: z15.literal("url_citation"),
3412
- url: z15.string(),
3413
- title: z15.string()
3611
+ var responseImageGenerationCallPartialImageSchema = z16.object({
3612
+ type: z16.literal("response.image_generation_call.partial_image"),
3613
+ item_id: z16.string(),
3614
+ output_index: z16.number(),
3615
+ partial_image_b64: z16.string()
3616
+ });
3617
+ var responseCodeInterpreterCallCodeDeltaSchema = z16.object({
3618
+ type: z16.literal("response.code_interpreter_call_code.delta"),
3619
+ item_id: z16.string(),
3620
+ output_index: z16.number(),
3621
+ delta: z16.string()
3622
+ });
3623
+ var responseCodeInterpreterCallCodeDoneSchema = z16.object({
3624
+ type: z16.literal("response.code_interpreter_call_code.done"),
3625
+ item_id: z16.string(),
3626
+ output_index: z16.number(),
3627
+ code: z16.string()
3628
+ });
3629
+ var responseAnnotationAddedSchema = z16.object({
3630
+ type: z16.literal("response.output_text.annotation.added"),
3631
+ annotation: z16.discriminatedUnion("type", [
3632
+ z16.object({
3633
+ type: z16.literal("url_citation"),
3634
+ url: z16.string(),
3635
+ title: z16.string()
3414
3636
  }),
3415
- z15.object({
3416
- type: z15.literal("file_citation"),
3417
- file_id: z15.string(),
3418
- filename: z15.string().nullish(),
3419
- index: z15.number().nullish(),
3420
- start_index: z15.number().nullish(),
3421
- end_index: z15.number().nullish(),
3422
- quote: z15.string().nullish()
3637
+ z16.object({
3638
+ type: z16.literal("file_citation"),
3639
+ file_id: z16.string(),
3640
+ filename: z16.string().nullish(),
3641
+ index: z16.number().nullish(),
3642
+ start_index: z16.number().nullish(),
3643
+ end_index: z16.number().nullish(),
3644
+ quote: z16.string().nullish()
3423
3645
  })
3424
3646
  ])
3425
3647
  });
3426
- var responseReasoningSummaryPartAddedSchema = z15.object({
3427
- type: z15.literal("response.reasoning_summary_part.added"),
3428
- item_id: z15.string(),
3429
- summary_index: z15.number()
3648
+ var responseReasoningSummaryPartAddedSchema = z16.object({
3649
+ type: z16.literal("response.reasoning_summary_part.added"),
3650
+ item_id: z16.string(),
3651
+ summary_index: z16.number()
3430
3652
  });
3431
- var responseReasoningSummaryTextDeltaSchema = z15.object({
3432
- type: z15.literal("response.reasoning_summary_text.delta"),
3433
- item_id: z15.string(),
3434
- summary_index: z15.number(),
3435
- delta: z15.string()
3653
+ var responseReasoningSummaryTextDeltaSchema = z16.object({
3654
+ type: z16.literal("response.reasoning_summary_text.delta"),
3655
+ item_id: z16.string(),
3656
+ summary_index: z16.number(),
3657
+ delta: z16.string()
3436
3658
  });
3437
- var openaiResponsesChunkSchema = z15.union([
3659
+ var openaiResponsesChunkSchema = z16.union([
3438
3660
  textDeltaChunkSchema,
3439
3661
  responseFinishedChunkSchema,
3440
3662
  responseCreatedChunkSchema,
3441
3663
  responseOutputItemAddedSchema,
3442
3664
  responseOutputItemDoneSchema,
3443
3665
  responseFunctionCallArgumentsDeltaSchema,
3666
+ responseImageGenerationCallPartialImageSchema,
3667
+ responseCodeInterpreterCallCodeDeltaSchema,
3668
+ responseCodeInterpreterCallCodeDoneSchema,
3444
3669
  responseAnnotationAddedSchema,
3445
3670
  responseReasoningSummaryPartAddedSchema,
3446
3671
  responseReasoningSummaryTextDeltaSchema,
3447
3672
  errorChunkSchema,
3448
- z15.object({ type: z15.string() }).loose()
3673
+ z16.object({ type: z16.string() }).loose()
3449
3674
  // fallback for unknown chunks
3450
3675
  ]);
3451
3676
  function isTextDeltaChunk(chunk) {
@@ -3466,6 +3691,15 @@ function isResponseCreatedChunk(chunk) {
3466
3691
  function isResponseFunctionCallArgumentsDeltaChunk(chunk) {
3467
3692
  return chunk.type === "response.function_call_arguments.delta";
3468
3693
  }
3694
+ function isResponseImageGenerationCallPartialImageChunk(chunk) {
3695
+ return chunk.type === "response.image_generation_call.partial_image";
3696
+ }
3697
+ function isResponseCodeInterpreterCallCodeDeltaChunk(chunk) {
3698
+ return chunk.type === "response.code_interpreter_call_code.delta";
3699
+ }
3700
+ function isResponseCodeInterpreterCallCodeDoneChunk(chunk) {
3701
+ return chunk.type === "response.code_interpreter_call_code.done";
3702
+ }
3469
3703
  function isResponseOutputItemAddedChunk(chunk) {
3470
3704
  return chunk.type === "response.output_item.added";
3471
3705
  }
@@ -3518,15 +3752,15 @@ function getResponsesModelConfig(modelId) {
3518
3752
  isReasoningModel: false
3519
3753
  };
3520
3754
  }
3521
- var openaiResponsesProviderOptionsSchema = z15.object({
3522
- include: z15.array(
3523
- z15.enum([
3755
+ var openaiResponsesProviderOptionsSchema = z16.object({
3756
+ include: z16.array(
3757
+ z16.enum([
3524
3758
  "reasoning.encrypted_content",
3525
3759
  "file_search_call.results",
3526
3760
  "message.output_text.logprobs"
3527
3761
  ])
3528
3762
  ).nullish(),
3529
- instructions: z15.string().nullish(),
3763
+ instructions: z16.string().nullish(),
3530
3764
  /**
3531
3765
  * Return the log probabilities of the tokens.
3532
3766
  *
@@ -3539,25 +3773,25 @@ var openaiResponsesProviderOptionsSchema = z15.object({
3539
3773
  * @see https://platform.openai.com/docs/api-reference/responses/create
3540
3774
  * @see https://cookbook.openai.com/examples/using_logprobs
3541
3775
  */
3542
- logprobs: z15.union([z15.boolean(), z15.number().min(1).max(TOP_LOGPROBS_MAX)]).optional(),
3776
+ logprobs: z16.union([z16.boolean(), z16.number().min(1).max(TOP_LOGPROBS_MAX)]).optional(),
3543
3777
  /**
3544
3778
  * The maximum number of total calls to built-in tools that can be processed in a response.
3545
3779
  * This maximum number applies across all built-in tool calls, not per individual tool.
3546
3780
  * Any further attempts to call a tool by the model will be ignored.
3547
3781
  */
3548
- maxToolCalls: z15.number().nullish(),
3549
- metadata: z15.any().nullish(),
3550
- parallelToolCalls: z15.boolean().nullish(),
3551
- previousResponseId: z15.string().nullish(),
3552
- promptCacheKey: z15.string().nullish(),
3553
- reasoningEffort: z15.string().nullish(),
3554
- reasoningSummary: z15.string().nullish(),
3555
- safetyIdentifier: z15.string().nullish(),
3556
- serviceTier: z15.enum(["auto", "flex", "priority"]).nullish(),
3557
- store: z15.boolean().nullish(),
3558
- strictJsonSchema: z15.boolean().nullish(),
3559
- textVerbosity: z15.enum(["low", "medium", "high"]).nullish(),
3560
- user: z15.string().nullish()
3782
+ maxToolCalls: z16.number().nullish(),
3783
+ metadata: z16.any().nullish(),
3784
+ parallelToolCalls: z16.boolean().nullish(),
3785
+ previousResponseId: z16.string().nullish(),
3786
+ promptCacheKey: z16.string().nullish(),
3787
+ reasoningEffort: z16.string().nullish(),
3788
+ reasoningSummary: z16.string().nullish(),
3789
+ safetyIdentifier: z16.string().nullish(),
3790
+ serviceTier: z16.enum(["auto", "flex", "priority"]).nullish(),
3791
+ store: z16.boolean().nullish(),
3792
+ strictJsonSchema: z16.boolean().nullish(),
3793
+ textVerbosity: z16.enum(["low", "medium", "high"]).nullish(),
3794
+ user: z16.string().nullish()
3561
3795
  });
3562
3796
 
3563
3797
  // src/speech/openai-speech-model.ts
@@ -3567,10 +3801,10 @@ import {
3567
3801
  parseProviderOptions as parseProviderOptions6,
3568
3802
  postJsonToApi as postJsonToApi6
3569
3803
  } from "@ai-sdk/provider-utils";
3570
- import { z as z16 } from "zod/v4";
3571
- var OpenAIProviderOptionsSchema = z16.object({
3572
- instructions: z16.string().nullish(),
3573
- speed: z16.number().min(0.25).max(4).default(1).nullish()
3804
+ import { z as z17 } from "zod/v4";
3805
+ var OpenAIProviderOptionsSchema = z17.object({
3806
+ instructions: z17.string().nullish(),
3807
+ speed: z17.number().min(0.25).max(4).default(1).nullish()
3574
3808
  });
3575
3809
  var OpenAISpeechModel = class {
3576
3810
  constructor(modelId, config) {
@@ -3681,33 +3915,33 @@ import {
3681
3915
  parseProviderOptions as parseProviderOptions7,
3682
3916
  postFormDataToApi
3683
3917
  } from "@ai-sdk/provider-utils";
3684
- import { z as z18 } from "zod/v4";
3918
+ import { z as z19 } from "zod/v4";
3685
3919
 
3686
3920
  // src/transcription/openai-transcription-options.ts
3687
- import { z as z17 } from "zod/v4";
3688
- var openAITranscriptionProviderOptions = z17.object({
3921
+ import { z as z18 } from "zod/v4";
3922
+ var openAITranscriptionProviderOptions = z18.object({
3689
3923
  /**
3690
3924
  * Additional information to include in the transcription response.
3691
3925
  */
3692
- include: z17.array(z17.string()).optional(),
3926
+ include: z18.array(z18.string()).optional(),
3693
3927
  /**
3694
3928
  * The language of the input audio in ISO-639-1 format.
3695
3929
  */
3696
- language: z17.string().optional(),
3930
+ language: z18.string().optional(),
3697
3931
  /**
3698
3932
  * An optional text to guide the model's style or continue a previous audio segment.
3699
3933
  */
3700
- prompt: z17.string().optional(),
3934
+ prompt: z18.string().optional(),
3701
3935
  /**
3702
3936
  * The sampling temperature, between 0 and 1.
3703
3937
  * @default 0
3704
3938
  */
3705
- temperature: z17.number().min(0).max(1).default(0).optional(),
3939
+ temperature: z18.number().min(0).max(1).default(0).optional(),
3706
3940
  /**
3707
3941
  * The timestamp granularities to populate for this transcription.
3708
3942
  * @default ['segment']
3709
3943
  */
3710
- timestampGranularities: z17.array(z17.enum(["word", "segment"])).default(["segment"]).optional()
3944
+ timestampGranularities: z18.array(z18.enum(["word", "segment"])).default(["segment"]).optional()
3711
3945
  });
3712
3946
 
3713
3947
  // src/transcription/openai-transcription-model.ts
@@ -3876,48 +4110,59 @@ var OpenAITranscriptionModel = class {
3876
4110
  };
3877
4111
  }
3878
4112
  };
3879
- var openaiTranscriptionResponseSchema = z18.object({
3880
- text: z18.string(),
3881
- language: z18.string().nullish(),
3882
- duration: z18.number().nullish(),
3883
- words: z18.array(
3884
- z18.object({
3885
- word: z18.string(),
3886
- start: z18.number(),
3887
- end: z18.number()
4113
+ var openaiTranscriptionResponseSchema = z19.object({
4114
+ text: z19.string(),
4115
+ language: z19.string().nullish(),
4116
+ duration: z19.number().nullish(),
4117
+ words: z19.array(
4118
+ z19.object({
4119
+ word: z19.string(),
4120
+ start: z19.number(),
4121
+ end: z19.number()
3888
4122
  })
3889
4123
  ).nullish(),
3890
- segments: z18.array(
3891
- z18.object({
3892
- id: z18.number(),
3893
- seek: z18.number(),
3894
- start: z18.number(),
3895
- end: z18.number(),
3896
- text: z18.string(),
3897
- tokens: z18.array(z18.number()),
3898
- temperature: z18.number(),
3899
- avg_logprob: z18.number(),
3900
- compression_ratio: z18.number(),
3901
- no_speech_prob: z18.number()
4124
+ segments: z19.array(
4125
+ z19.object({
4126
+ id: z19.number(),
4127
+ seek: z19.number(),
4128
+ start: z19.number(),
4129
+ end: z19.number(),
4130
+ text: z19.string(),
4131
+ tokens: z19.array(z19.number()),
4132
+ temperature: z19.number(),
4133
+ avg_logprob: z19.number(),
4134
+ compression_ratio: z19.number(),
4135
+ no_speech_prob: z19.number()
3902
4136
  })
3903
4137
  ).nullish()
3904
4138
  });
3905
4139
 
4140
+ // src/version.ts
4141
+ var VERSION = true ? "2.1.0-beta.11" : "0.0.0-test";
4142
+
3906
4143
  // src/openai-provider.ts
3907
4144
  function createOpenAI(options = {}) {
3908
4145
  var _a, _b;
3909
- const baseURL = (_a = withoutTrailingSlash(options.baseURL)) != null ? _a : "https://api.openai.com/v1";
4146
+ const baseURL = (_a = withoutTrailingSlash(
4147
+ loadOptionalSetting({
4148
+ settingValue: options.baseURL,
4149
+ environmentVariableName: "OPENAI_BASE_URL"
4150
+ })
4151
+ )) != null ? _a : "https://api.openai.com/v1";
3910
4152
  const providerName = (_b = options.name) != null ? _b : "openai";
3911
- const getHeaders = () => ({
3912
- Authorization: `Bearer ${loadApiKey({
3913
- apiKey: options.apiKey,
3914
- environmentVariableName: "OPENAI_API_KEY",
3915
- description: "OpenAI"
3916
- })}`,
3917
- "OpenAI-Organization": options.organization,
3918
- "OpenAI-Project": options.project,
3919
- ...options.headers
3920
- });
4153
+ const getHeaders = () => withUserAgentSuffix(
4154
+ {
4155
+ Authorization: `Bearer ${loadApiKey({
4156
+ apiKey: options.apiKey,
4157
+ environmentVariableName: "OPENAI_API_KEY",
4158
+ description: "OpenAI"
4159
+ })}`,
4160
+ "OpenAI-Organization": options.organization,
4161
+ "OpenAI-Project": options.project,
4162
+ ...options.headers
4163
+ },
4164
+ `ai-sdk/openai/${VERSION}`
4165
+ );
3921
4166
  const createChatModel = (modelId) => new OpenAIChatLanguageModel(modelId, {
3922
4167
  provider: `${providerName}.chat`,
3923
4168
  url: ({ path }) => `${baseURL}${path}`,
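
Note: `createOpenAI` now resolves the base URL from the `OPENAI_BASE_URL` environment variable when `baseURL` is not passed, and stamps every request with an `ai-sdk/openai/<VERSION>` user-agent suffix. A minimal sketch (the proxy URL is illustrative):

  // OPENAI_BASE_URL=https://my-proxy.example.com/v1 node app.mjs
  import { createOpenAI } from '@ai-sdk/openai';

  const openai = createOpenAI({
    apiKey: process.env.OPENAI_API_KEY,
    // baseURL omitted: falls back to OPENAI_BASE_URL,
    // then to https://api.openai.com/v1
  });
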
@@ -3992,6 +4237,7 @@ function createOpenAI(options = {}) {
3992
4237
  }
3993
4238
  var openai = createOpenAI();
3994
4239
  export {
4240
+ VERSION,
3995
4241
  createOpenAI,
3996
4242
  openai
3997
4243
  };